webviz-subsurface 0.2.36__py3-none-any.whl → 0.2.38__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webviz_subsurface/__init__.py +1 -1
- webviz_subsurface/_components/color_picker.py +1 -1
- webviz_subsurface/_datainput/well_completions.py +2 -1
- webviz_subsurface/_providers/ensemble_polygon_provider/__init__.py +3 -0
- webviz_subsurface/_providers/ensemble_polygon_provider/_polygon_discovery.py +97 -0
- webviz_subsurface/_providers/ensemble_polygon_provider/_provider_impl_file.py +226 -0
- webviz_subsurface/_providers/ensemble_polygon_provider/ensemble_polygon_provider.py +53 -0
- webviz_subsurface/_providers/ensemble_polygon_provider/ensemble_polygon_provider_factory.py +99 -0
- webviz_subsurface/_providers/ensemble_polygon_provider/polygon_server.py +125 -0
- webviz_subsurface/plugins/_co2_leakage/_plugin.py +577 -293
- webviz_subsurface/plugins/_co2_leakage/_types.py +7 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/_misc.py +9 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/callbacks.py +226 -186
- webviz_subsurface/plugins/_co2_leakage/_utilities/co2volume.py +591 -128
- webviz_subsurface/plugins/_co2_leakage/_utilities/containment_data_provider.py +147 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/containment_info.py +31 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/ensemble_well_picks.py +105 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/generic.py +170 -2
- webviz_subsurface/plugins/_co2_leakage/_utilities/initialization.py +199 -97
- webviz_subsurface/plugins/_co2_leakage/_utilities/polygon_handler.py +60 -0
- webviz_subsurface/plugins/_co2_leakage/_utilities/summary_graphs.py +77 -173
- webviz_subsurface/plugins/_co2_leakage/_utilities/surface_publishing.py +122 -21
- webviz_subsurface/plugins/_co2_leakage/_utilities/unsmry_data_provider.py +108 -0
- webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py +44 -19
- webviz_subsurface/plugins/_co2_leakage/views/mainview/settings.py +944 -359
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/METADATA +2 -2
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/RECORD +33 -20
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/WHEEL +1 -1
- /webviz_subsurface/plugins/_co2_leakage/_utilities/{fault_polygons.py → fault_polygons_handler.py} +0 -0
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/LICENSE +0 -0
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/LICENSE.chromedriver +0 -0
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/entry_points.txt +0 -0
- {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/top_level.txt +0 -0
webviz_subsurface/plugins/_co2_leakage/_utilities/summary_graphs.py

@@ -1,54 +1,53 @@
-import dataclasses
-from typing import Iterable, List, Union
+from typing import Union
 
 import numpy as np
-import pandas as pd
 import plotly.colors
 import plotly.graph_objects as go
 
-from webviz_subsurface.
+from webviz_subsurface.plugins._co2_leakage._utilities.containment_data_provider import (
+    ContainmentDataProvider,
+)
 from webviz_subsurface.plugins._co2_leakage._utilities.generic import (
     Co2MassScale,
     Co2VolumeScale,
 )
+from webviz_subsurface.plugins._co2_leakage._utilities.unsmry_data_provider import (
+    UnsmryDataProvider,
+)
 
 
 # pylint: disable=too-many-locals
 def generate_summary_figure(
-
-    realizations_unsmry: List[int],
+    unsmry_provider: UnsmryDataProvider,
     scale: Union[Co2MassScale, Co2VolumeScale],
-
-    realizations_containment: List[int],
+    containment_provider: ContainmentDataProvider,
 ) -> go.Figure:
-
-
-    df_unsmry = _read_dataframe(
-        table_provider_unsmry, realizations_unsmry, columns_unsmry, scale
-    )
-    df_containment = _read_dataframe_containment(
-        table_provider_containment, realizations_containment, columns_containment, scale
-    )
-    fig = go.Figure()
-    showlegend = True
+    df_unsmry = unsmry_provider.extract(scale)
+    df_containment = containment_provider.extract_condensed_dataframe(scale)
 
+    # TODO: expose these directly from data providers?
     r_min = min(df_unsmry.REAL)
-    unsmry_last_total = df_unsmry[df_unsmry.REAL == r_min][
-
-        -1
-    ]
-    unsmry_last_dissolved = df_unsmry[df_unsmry.REAL == r_min][
-        columns_unsmry.dissolved
+    unsmry_last_total = df_unsmry[df_unsmry.REAL == r_min][
+        unsmry_provider.colname_total
     ].iloc[-1]
-
-
-    ]
-    containment_last_mobile = df_containment[df_containment.REAL == r_min][
-        columns_containment.mobile
+    unsmry_last_mobile = df_unsmry[df_unsmry.REAL == r_min][
+        unsmry_provider.colname_mobile
     ].iloc[-1]
-
-
+    unsmry_last_dissolved = df_unsmry[df_unsmry.REAL == r_min][
+        unsmry_provider.colname_dissolved
     ].iloc[-1]
+
+    containment_reference = df_containment[df_containment.REAL == r_min]
+    containment_last_total = containment_reference[
+        containment_reference["phase"] == "total"
+    ]["amount"].iloc[-1]
+    containment_last_mobile = containment_reference[
+        containment_reference["phase"] == "free_gas"
+    ]["amount"].iloc[-1]
+    containment_last_dissolved = containment_reference[
+        containment_reference["phase"] == "dissolved"
+    ]["amount"].iloc[-1]
+    # ---
     last_total_err_percentage = (
         100.0 * abs(containment_last_total - unsmry_last_total) / unsmry_last_total
     )
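
The refactor above replaces raw table-provider arguments with the new provider classes. `extract_condensed_dataframe` itself is not part of this hunk, but the filtering logic implies a long-format frame with `REAL`, `date`, `phase` and `amount` columns. A minimal pandas sketch (illustrative values only) of how the last per-phase amounts are read from such a frame:

```python
import pandas as pd

# Toy stand-in for ContainmentDataProvider.extract_condensed_dataframe output
# (column names taken from the hunk above; the values are made up).
df_containment = pd.DataFrame(
    {
        "REAL": [0] * 6,
        "date": ["2030-01-01"] * 3 + ["2050-01-01"] * 3,
        "phase": ["total", "free_gas", "dissolved"] * 2,
        "amount": [1.0, 0.6, 0.4, 2.0, 1.1, 0.9],
    }
)

r_min = min(df_containment.REAL)
reference = df_containment[df_containment.REAL == r_min]
# Last reported amount per phase, mirroring the updated callback logic
last_total = reference[reference["phase"] == "total"]["amount"].iloc[-1]
last_mobile = reference[reference["phase"] == "free_gas"]["amount"].iloc[-1]
last_dissolved = reference[reference["phase"] == "dissolved"]["amount"].iloc[-1]
print(last_total, last_mobile, last_dissolved)  # 2.0 1.1 0.9
```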
@@ -64,76 +63,75 @@ def generate_summary_figure(
     last_mobile_err_percentage = np.round(last_mobile_err_percentage, 2)
     last_dissolved_err_percentage = np.round(last_dissolved_err_percentage, 2)
 
+    _colors = {
+        "total": plotly.colors.qualitative.Plotly[3],
+        "mobile": plotly.colors.qualitative.Plotly[2],
+        "dissolved": plotly.colors.qualitative.Plotly[0],
+        "trapped": plotly.colors.qualitative.Plotly[1],
+    }
+
+    fig = go.Figure()
+    showlegend = True
     for _, sub_df in df_unsmry.groupby("realization"):
-        colors = plotly.colors.qualitative.Plotly
         fig.add_scatter(
-            x=sub_df[
-            y=sub_df[
+            x=sub_df[unsmry_provider.colname_date],
+            y=sub_df[unsmry_provider.colname_total],
             name="UNSMRY",
-            legendgroup="
+            legendgroup="total",
             legendgrouptitle_text=f"Total ({last_total_err_percentage} %)",
             showlegend=showlegend,
-            marker_color=
+            marker_color=_colors["total"],
         )
         fig.add_scatter(
-            x=sub_df[
-            y=sub_df[
-            name=f"UNSMRY ({
-            legendgroup="
+            x=sub_df[unsmry_provider.colname_date],
+            y=sub_df[unsmry_provider.colname_mobile],
+            name=f"UNSMRY ({unsmry_provider.colname_mobile})",
+            legendgroup="mobile",
             legendgrouptitle_text=f"Mobile ({last_mobile_err_percentage} %)",
             showlegend=showlegend,
-            marker_color=
+            marker_color=_colors["mobile"],
        )
         fig.add_scatter(
-            x=sub_df[
-            y=sub_df[
-            name=f"UNSMRY ({
-            legendgroup="
+            x=sub_df[unsmry_provider.colname_date],
+            y=sub_df[unsmry_provider.colname_dissolved],
+            name=f"UNSMRY ({unsmry_provider.colname_dissolved})",
+            legendgroup="dissolved",
             legendgrouptitle_text=f"Dissolved ({last_dissolved_err_percentage} %)",
             showlegend=showlegend,
-            marker_color=
+            marker_color=_colors["dissolved"],
         )
         fig.add_scatter(
-            x=sub_df[
-            y=sub_df[
-            name=f"UNSMRY ({
-            legendgroup="
+            x=sub_df[unsmry_provider.colname_date],
+            y=sub_df[unsmry_provider.colname_trapped],
+            name=f"UNSMRY ({unsmry_provider.colname_trapped})",
+            legendgroup="trapped",
             legendgrouptitle_text="Trapped",
             showlegend=showlegend,
-            marker_color=
+            marker_color=_colors["trapped"],
         )
         showlegend = False
-
-
-
-
-
-
-
-
-
-
-
-
-        fig.add_scatter(
-            x=sub_df[columns_containment.time],
-            y=sub_df[columns_containment.mobile],
-            name=f"Containment script ({columns_containment.mobile})",
-            legendgroup="group_2",
-            showlegend=showlegend,
-            marker_color=colors[2],
-            line_dash="dash",
-        )
+
+    _col_names = {
+        "total": "total",
+        "free_gas": "mobile",
+        "dissolved": "dissolved",
+        "trapped_gas": "trapped",
+    }
+
+    first_real = None
+    for (real, phase), sub_df in df_containment.groupby(["REAL", "phase"]):
+        if first_real is None:
+            first_real = real
         fig.add_scatter(
-            x=sub_df[
-            y=sub_df[
-            name=f"Containment script ({
-            legendgroup=
-            showlegend=
-            marker_color=
+            x=sub_df["date"],
+            y=sub_df["amount"],
+            name=f"Containment script ({phase})",
+            legendgroup=_col_names[phase],
+            showlegend=bool(first_real == real),
+            marker_color=_colors[_col_names[phase]],
             line_dash="dash",
         )
-
+
     fig.layout.xaxis.title = "Time"
     fig.layout.yaxis.title = f"Amount CO2 [{scale.value}]"
     fig.layout.paper_bgcolor = "rgba(0,0,0,0)"
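
The plotting loops above draw one trace per realization but want a single legend entry per group: the UNSMRY loop flips `showlegend` to `False` after the first realization, while the containment loop uses `showlegend=bool(first_real == real)`. A small self-contained plotly sketch of the latter pattern, with dummy data:

```python
import plotly.graph_objects as go

# Dummy per-realization series (values are made up)
series = {0: [1.0, 2.0, 3.0], 1: [1.2, 2.1, 2.9], 2: [0.9, 1.8, 3.1]}
years = [2030, 2040, 2050]

fig = go.Figure()
first_real = None
for real, values in series.items():
    if first_real is None:
        first_real = real
    fig.add_scatter(
        x=years,
        y=values,
        name="Containment script (total)",
        legendgroup="total",
        # Only the first realization contributes a legend entry
        showlegend=bool(first_real == real),
        line_dash="dash",
    )
fig.show()
```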
@@ -142,97 +140,3 @@ def generate_summary_figure(
     fig.layout.margin.l = 10
     fig.layout.margin.r = 10
     return fig
-
-
-@dataclasses.dataclass
-class _ColumnNames:
-    time: str
-    dissolved: str
-    trapped: str
-    mobile: str
-
-    def values(self) -> Iterable[str]:
-        return dataclasses.asdict(self).values()
-
-
-@dataclasses.dataclass
-class _ColumnNamesContainment:
-    time: str
-    dissolved: str
-    mobile: str
-
-    def values(self) -> Iterable[str]:
-        return dataclasses.asdict(self).values()
-
-
-def _read_dataframe(
-    table_provider: EnsembleTableProvider,
-    realizations: List[int],
-    columns: _ColumnNames,
-    co2_scale: Union[Co2MassScale, Co2VolumeScale],
-) -> pd.DataFrame:
-    full = pd.concat(
-        [
-            table_provider.get_column_data(list(columns.values()), [real]).assign(
-                realization=real
-            )
-            for real in realizations
-        ]
-    )
-    full["total"] = (
-        full[columns.dissolved] + full[columns.trapped] + full[columns.mobile]
-    )
-    for col in [columns.dissolved, columns.trapped, columns.mobile, "total"]:
-        if co2_scale == Co2MassScale.MTONS:
-            full[col] = full[col] / 1e9
-        elif co2_scale == Co2MassScale.NORMALIZE:
-            full[col] = full[col] / full["total"].max()
-    return full
-
-
-def _read_dataframe_containment(
-    table_provider: EnsembleTableProvider,
-    realizations: List[int],
-    columns: _ColumnNamesContainment,
-    co2_scale: Union[Co2MassScale, Co2VolumeScale],
-) -> pd.DataFrame:
-    full = pd.concat(
-        [
-            table_provider.get_column_data(list(columns.values()), [real]).assign(
-                realization=real
-            )
-            for real in realizations
-        ]
-    )
-    full["total"] = full[columns.dissolved] + full[columns.mobile]
-    for col in [columns.dissolved, columns.mobile, "total"]:
-        if co2_scale == Co2MassScale.MTONS:
-            full[col] = full[col] / 1e9
-        elif co2_scale == Co2MassScale.NORMALIZE:
-            full[col] = full[col] / full["total"].max()
-    return full
-
-
-def _column_subset_unsmry(table_provider: EnsembleTableProvider) -> _ColumnNames:
-    existing = set(table_provider.column_names())
-    assert "DATE" in existing
-    # Try PFLOTRAN names
-    col_names = _ColumnNames("DATE", "FGMDS", "FGMTR", "FGMGP")
-    if set(col_names.values()).issubset(existing):
-        return col_names
-    # Try Eclipse names
-    col_names = _ColumnNames("DATE", "FWCD", "FGCDI", "FGCDM")
-    if set(col_names.values()).issubset(existing):
-        return col_names
-    raise KeyError(f"Could not find suitable data columns among: {', '.join(existing)}")
-
-
-def _column_subset_containment(
-    table_provider: EnsembleTableProvider,
-) -> _ColumnNamesContainment:
-    existing = set(table_provider.column_names())
-    assert "date" in existing
-    col_names = _ColumnNamesContainment("date", "total_aqueous", "total_gas")
-    if set(col_names.values()).issubset(existing):
-        return col_names
-    raise KeyError(f"Could not find suitable data columns among: {', '.join(existing)}")
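
The deleted `_read_dataframe*` helpers built one long table by concatenating per-realization column data and then rescaling; that responsibility now sits in the provider classes (see `UnsmryDataProvider.extract` later in this diff). A minimal pandas sketch of the same concat-and-scale pattern, with a toy function standing in for `EnsembleTableProvider.get_column_data`:

```python
import pandas as pd


def get_column_data(columns, realizations):
    # Toy stand-in for EnsembleTableProvider.get_column_data (values are made up, in kg)
    return pd.DataFrame({col: [1.0e9, 2.0e9] for col in columns})


columns = ["FGMDS", "FGMTR", "FGMGP"]  # PFLOTRAN dissolved/trapped/mobile vectors
realizations = [0, 1]

full = pd.concat(
    [get_column_data(columns, [real]).assign(realization=real) for real in realizations]
)
full["total"] = full["FGMDS"] + full["FGMTR"] + full["FGMGP"]

# Rescale all mass columns, as the Co2MassScale.MTONS branch does (kg -> Mt is a factor 1e9)
for col in columns + ["total"]:
    full[col] = full[col] / 1e9
print(full)
```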
webviz_subsurface/plugins/_co2_leakage/_utilities/surface_publishing.py

@@ -17,12 +17,15 @@ from webviz_subsurface._providers import (
 from webviz_subsurface._providers.ensemble_surface_provider.ensemble_surface_provider import (
     SurfaceStatistic,
 )
-from webviz_subsurface.plugins._co2_leakage._utilities.generic import
+from webviz_subsurface.plugins._co2_leakage._utilities.generic import (
+    FilteredMapAttribute,
+    MapType,
+)
 from webviz_subsurface.plugins._co2_leakage._utilities.plume_extent import (
     truncate_surfaces,
 )
 
-SCALE_DICT = {"kg":
+SCALE_DICT = {"kg": 0.001, "tons": 1, "M tons": 1e6}
 
 
 @dataclass
@@ -44,10 +47,33 @@ def publish_and_get_surface_metadata(
     provider: EnsembleSurfaceProvider,
     address: Union[SurfaceAddress, TruncatedSurfaceAddress],
     visualization_info: Dict[str, Any],
-    map_attribute_names:
+    map_attribute_names: FilteredMapAttribute,
 ) -> Tuple[Optional[SurfaceImageMeta], Optional[str], Optional[Any]]:
     if isinstance(address, TruncatedSurfaceAddress):
-        return
+        return (
+            *_publish_and_get_truncated_surface_metadata(server, provider, address),
+            None,
+        )
+    address_map_attribute = next(
+        (
+            key
+            for key, value in map_attribute_names.filtered_values.items()
+            if value == address.attribute
+        ),
+        None,
+    )
+    assert address_map_attribute is not None
+    if MapType[address_map_attribute.name].value == "MIGRATION_TIME" and isinstance(
+        address, StatisticalSurfaceAddress
+    ):
+        return (
+            *_publish_and_get_statistical_time_surface_metadata(
+                server,
+                provider,
+                address,
+            ),
+            None,
+        )
     provider_id: str = provider.provider_id()
     qualified_address = QualifiedSurfaceAddress(provider_id, address)
     surf_meta = server.get_surface_metadata(qualified_address)
@@ -61,22 +87,18 @@ def publish_and_get_surface_metadata(
         if not surface:
             warnings.warn(f"Could not find surface file with properties: {address}")
             return None, None, None
-
-
-            map_attribute_names[MapAttribute.FREE],
-            map_attribute_names[MapAttribute.DISSOLVED],
-        ]:
+
+        if MapType[address_map_attribute.name].value == "MASS":
             surface.values = surface.values / SCALE_DICT[visualization_info["unit"]]
-
+        summed_mass = np.ma.sum(surface.values)
         if (
-
-
-                map_attribute_names[MapAttribute.MIGRATION_TIME_SGAS],
-                map_attribute_names[MapAttribute.MIGRATION_TIME_AMFG],
-            ]
-            and visualization_info["threshold"] >= 0
+            MapType[address_map_attribute.name].value not in ["PLUME", "MIGRATION_TIME"]
+            and visualization_info["thresholds"][visualization_info["attribute"]] >= 0
         ):
-            surface.operation(
+            surface.operation(
+                "elile",
+                visualization_info["thresholds"][visualization_info["attribute"]],
+            )
         server.publish_surface(qualified_address, surface)
         surf_meta = server.get_surface_metadata(qualified_address)
     return surf_meta, server.encode_partial_url(qualified_address), summed_mass
@@ -86,7 +108,7 @@ def _publish_and_get_truncated_surface_metadata(
     server: SurfaceImageServer,
     provider: EnsembleSurfaceProvider,
     address: TruncatedSurfaceAddress,
-) -> Tuple[Optional[SurfaceImageMeta], str
+) -> Tuple[Optional[SurfaceImageMeta], str]:
     qualified_address = QualifiedSurfaceAddress(
         provider.provider_id(),
         # TODO: Should probably use a dedicated address type for this. Statistical surface
@@ -102,15 +124,13 @@ def _publish_and_get_truncated_surface_metadata(
         ),
     )
     surf_meta = server.get_surface_metadata(qualified_address)
-    summed_mass = None
     if surf_meta is None:
         surface = _generate_surface(provider, address)
         if surface is None:
             raise ValueError(f"Could not generate surface for address: {address}")
-        summed_mass = np.ma.sum(surface.values)
         server.publish_surface(qualified_address, surface)
         surf_meta = server.get_surface_metadata(qualified_address)
-    return surf_meta, server.encode_partial_url(qualified_address)
+    return surf_meta, server.encode_partial_url(qualified_address)
 
 
 def _generate_surface(
@@ -136,3 +156,84 @@ def _generate_surface(
     template.values = plume_count
     template.values.mask = plume_count < 1e-4  # type: ignore
     return template
+
+
+def _publish_and_get_statistical_time_surface_metadata(
+    server: SurfaceImageServer,
+    provider: EnsembleSurfaceProvider,
+    address: StatisticalSurfaceAddress,
+) -> Tuple[Optional[SurfaceImageMeta], str]:
+    qualified_address = QualifiedSurfaceAddress(
+        provider.provider_id(),
+        StatisticalSurfaceAddress(
+            address.attribute,
+            address.name,
+            address.datestr,
+            address.statistic,
+            address.realizations,
+        ),
+    )
+    surf_meta = server.get_surface_metadata(qualified_address)
+    if surf_meta is None:
+        surface = _generate_statisical_time_surface(provider, address)
+        if surface is None:
+            raise ValueError(f"Could not generate surface for address: {address}")
+        server.publish_surface(qualified_address, surface)
+        surf_meta = server.get_surface_metadata(qualified_address)
+    return surf_meta, server.encode_partial_url(qualified_address)
+
+
+def _generate_statisical_time_surface(
+    provider: EnsembleSurfaceProvider,
+    address: StatisticalSurfaceAddress,
+) -> Optional[xtgeo.RegularSurface]:
+    surfaces = [
+        provider.get_surface(
+            SimulatedSurfaceAddress(
+                attribute=address.attribute,
+                name=address.name,
+                datestr=address.datestr,
+                realization=r,
+            )
+        )
+        for r in address.realizations
+    ]
+    surfaces = [s for s in surfaces if s is not None]
+    if len(surfaces) == 0:
+        return None
+    statistical_map = _statistics_on_time_map(surfaces, address.statistic)
+    if statistical_map is None:
+        return None
+    template: xtgeo.RegularSurface = surfaces[0].copy()  # type: ignore
+    template.values = statistical_map
+    return template
+
+
+# pylint: disable=too-many-return-statements
+def _statistics_on_time_map(
+    surfaces: List[xtgeo.RegularSurface],
+    statistic: SurfaceStatistic,
+) -> Optional[np.ndarray]:
+    maps = np.zeros((len(surfaces), *surfaces[0].values.shape))
+    for i, surface in enumerate(surfaces):
+        maps[i, :, :] = surface.values
+        masked = np.where(surface.values.mask)
+        maps[i, masked[0], masked[1]] = np.inf
+    if statistic == SurfaceStatistic.MEAN:
+        return _turn_inf_to_nan(np.mean(maps, axis=0))
+    if statistic == SurfaceStatistic.STDDEV:
+        return _turn_inf_to_nan(np.std(maps, axis=0))
+    if statistic == SurfaceStatistic.MINIMUM:
+        return _turn_inf_to_nan(np.min(maps, axis=0))
+    if statistic == SurfaceStatistic.MAXIMUM:
+        return _turn_inf_to_nan(np.max(maps, axis=0))
+    if statistic == SurfaceStatistic.P10:
+        return _turn_inf_to_nan(np.percentile(maps, 10, axis=0))
+    if statistic == SurfaceStatistic.P90:
+        return _turn_inf_to_nan(np.percentile(maps, 90, axis=0))
+    return None
+
+
+def _turn_inf_to_nan(surface: np.ndarray) -> np.ndarray:
+    surface[np.where(surface == np.inf)] = np.nan
+    return surface
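
`_statistics_on_time_map` handles undefined cells by writing `inf` into the stacked maps before taking the statistic, and `_turn_inf_to_nan` converts any `inf` that survives back to `NaN`. A short numpy-only sketch of that trick on two toy 2x2 maps, one of which has a masked cell:

```python
import numpy as np
import numpy.ma as ma

# Two toy migration-time maps; cell (0, 1) is undefined (masked) in the second map
surf_a = ma.array([[1.0, 2.0], [3.0, 4.0]], mask=[[False, False], [False, False]])
surf_b = ma.array([[2.0, 9.9], [5.0, 6.0]], mask=[[False, True], [False, False]])

maps = np.zeros((2, 2, 2))
for i, values in enumerate([surf_a, surf_b]):
    maps[i, :, :] = values
    masked = np.where(values.mask)
    # Undefined cells are set to inf before the statistic is taken
    maps[i, masked[0], masked[1]] = np.inf

maximum = np.max(maps, axis=0)
maximum[np.where(maximum == np.inf)] = np.nan  # same idea as _turn_inf_to_nan
print(maximum)  # [[2. nan], [5. 6.]] -- the cell masked in surf_b ends up NaN
```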
webviz_subsurface/plugins/_co2_leakage/_utilities/unsmry_data_provider.py (new file)

@@ -0,0 +1,108 @@
+from typing import Tuple, Union
+
+import pandas as pd
+
+from webviz_subsurface._providers import EnsembleTableProvider
+from webviz_subsurface.plugins._co2_leakage._utilities.generic import (
+    Co2MassScale,
+    Co2VolumeScale,
+    MenuOptions,
+)
+
+_PFLOTRAN_COLNAMES = ("DATE", "FGMDS", "FGMTR", "FGMGP")
+_ECLIPSE_COLNAMES = ("DATE", "FWCD", "FGCDI", "FGCDM")
+
+
+class UnsmryDataValidationError(Exception):
+    pass
+
+
+class UnsmryDataProvider:
+    def __init__(self, provider: EnsembleTableProvider):
+        UnsmryDataProvider._validate(provider)
+        self._provider = provider
+        (
+            self._colname_date,
+            self._colname_dissolved,
+            self._colname_trapped,
+            self._colname_mobile,
+        ) = UnsmryDataProvider._column_subset_unsmry(provider)
+        self._colname_total = "TOTAL"
+
+    @property
+    def menu_options(self) -> MenuOptions:
+        return {
+            "zones": [],
+            "regions": [],
+            "phases": ["total", "gas", "dissolved"],
+            "plume_groups": [],
+            "dates": [],
+        }
+
+    @property
+    def colname_date(self) -> str:
+        return self._colname_date
+
+    @property
+    def colname_dissolved(self) -> str:
+        return self._colname_dissolved
+
+    @property
+    def colname_trapped(self) -> str:
+        return self._colname_trapped
+
+    @property
+    def colname_mobile(self) -> str:
+        return self._colname_mobile
+
+    @property
+    def colname_total(self) -> str:
+        return self._colname_total
+
+    def extract(self, scale: Union[Co2MassScale, Co2VolumeScale]) -> pd.DataFrame:
+        columns = [
+            self._colname_date,
+            self._colname_dissolved,
+            self._colname_trapped,
+            self._colname_mobile,
+        ]
+        full = pd.concat(
+            [
+                self._provider.get_column_data(columns, [real]).assign(realization=real)
+                for real in self._provider.realizations()
+            ]
+        )
+        full[self._colname_total] = (
+            full[self._colname_dissolved]
+            + full[self._colname_trapped]
+            + full[self.colname_mobile]
+        )
+        total_max = full[self._colname_total].max()
+        for col in columns[1:] + [self._colname_total]:
+            if scale == Co2MassScale.MTONS:
+                full[col] = full[col] / 1e9
+            elif scale == Co2MassScale.NORMALIZE:
+                full[col] = full[col] / total_max
+        return full
+
+    @staticmethod
+    def _column_subset_unsmry(
+        provider: EnsembleTableProvider,
+    ) -> Tuple[str, str, str, str]:
+        existing = set(provider.column_names())
+        # Try PFLOTRAN names
+        if set(_PFLOTRAN_COLNAMES).issubset(existing):
+            return _PFLOTRAN_COLNAMES
+        # Try Eclipse names
+        if set(_ECLIPSE_COLNAMES).issubset(existing):
+            return _ECLIPSE_COLNAMES
+        raise KeyError(
+            f"Could not find suitable data columns among: {', '.join(existing)}"
+        )
+
+    @staticmethod
+    def _validate(provider: EnsembleTableProvider) -> None:
+        try:
+            UnsmryDataProvider._column_subset_unsmry(provider)
+        except KeyError as e:
+            raise UnsmryDataValidationError from e
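
A possible usage sketch of the new provider, assuming webviz-subsurface 0.2.38 is installed. The stub class below is hypothetical and only implements the single `column_names` method that construction and column autodetection read; it is not the real `EnsembleTableProvider`:

```python
from webviz_subsurface.plugins._co2_leakage._utilities.unsmry_data_provider import (
    UnsmryDataProvider,
    UnsmryDataValidationError,
)


class StubTableProvider:
    """Hypothetical stand-in exposing only what the constructor above reads."""

    def __init__(self, columns):
        self._columns = columns

    def column_names(self):
        return self._columns


# PFLOTRAN-style summary vectors are detected automatically
provider = UnsmryDataProvider(StubTableProvider(["DATE", "FGMDS", "FGMTR", "FGMGP"]))
print(provider.colname_dissolved, provider.colname_total)  # FGMDS TOTAL

# A table without PFLOTRAN or Eclipse CO2 columns fails validation
try:
    UnsmryDataProvider(StubTableProvider(["DATE", "FOPT"]))
except UnsmryDataValidationError:
    print("no suitable CO2 summary columns found")
```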