webviz-subsurface 0.2.37__py3-none-any.whl → 0.2.39__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (25)
  1. tests/unit_tests/plugin_tests/test_tornado_data.py +10 -1
  2. webviz_subsurface/_components/tornado/_tornado_bar_chart.py +31 -11
  3. webviz_subsurface/_components/tornado/_tornado_data.py +20 -2
  4. webviz_subsurface/_datainput/well_completions.py +2 -1
  5. webviz_subsurface/_providers/ensemble_table_provider/ensemble_table_provider_factory.py +4 -0
  6. webviz_subsurface/_utils/design_matrix.py +36 -0
  7. webviz_subsurface/plugins/_co2_leakage/_plugin.py +623 -493
  8. webviz_subsurface/plugins/_co2_leakage/_types.py +7 -0
  9. webviz_subsurface/plugins/_co2_leakage/_utilities/callbacks.py +96 -52
  10. webviz_subsurface/plugins/_co2_leakage/_utilities/co2volume.py +300 -82
  11. webviz_subsurface/plugins/_co2_leakage/_utilities/containment_info.py +31 -0
  12. webviz_subsurface/plugins/_co2_leakage/_utilities/initialization.py +16 -7
  13. webviz_subsurface/plugins/_co2_leakage/_utilities/surface_publishing.py +102 -9
  14. webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py +14 -1
  15. webviz_subsurface/plugins/_co2_leakage/views/mainview/settings.py +181 -58
  16. webviz_subsurface/plugins/_parameter_analysis/_types.py +1 -0
  17. webviz_subsurface/plugins/_parameter_analysis/_utils/_parameters_model.py +10 -2
  18. webviz_subsurface/plugins/_parameter_analysis/_views/_parameter_distributions_view/_settings/_visualization_type.py +2 -1
  19. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/METADATA +1 -1
  20. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/RECORD +25 -22
  21. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/LICENSE +0 -0
  22. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/LICENSE.chromedriver +0 -0
  23. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/WHEEL +0 -0
  24. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/entry_points.txt +0 -0
  25. {webviz_subsurface-0.2.37.dist-info → webviz_subsurface-0.2.39.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,31 @@
+ from dataclasses import dataclass
+ from typing import List, Optional
+
+ from webviz_subsurface._utils.enum_shim import StrEnum
+
+
+ class StatisticsTabOption(StrEnum):
+     PROBABILITY_PLOT = "probability-plot"
+     BOX_PLOT = "box-plot"
+
+
+ # pylint: disable=too-many-instance-attributes
+ @dataclass(frozen=True)  # NBNB-AS: Removed slots=True (python>=3.10)
+ class ContainmentInfo:
+     zone: Optional[str]
+     region: Optional[str]
+     zones: List[str]
+     regions: Optional[List[str]]
+     phase: Optional[str]
+     containment: Optional[str]
+     plume_group: Optional[str]
+     color_choice: str
+     mark_choice: str
+     sorting: str
+     phases: List[str]
+     containments: List[str]
+     plume_groups: List[str]
+     use_stats: bool
+     date_option: str
+     statistics_tab_option: StatisticsTabOption
+     box_show_points: str
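
For orientation, a minimal sketch of how the new frozen dataclass could be instantiated. The field values below are illustrative placeholders only, not values taken from the plugin:

```python
from webviz_subsurface.plugins._co2_leakage._utilities.containment_info import (
    ContainmentInfo,
    StatisticsTabOption,
)

# Illustrative values only; in the plugin this object is built from the settings callbacks.
info = ContainmentInfo(
    zone="all",
    region=None,
    zones=["zone_a", "zone_b"],
    regions=None,
    phase="total",
    containment="total",
    plume_group=None,
    color_choice="phase",
    mark_choice="none",
    sorting="color",
    phases=["gas", "aqueous"],
    containments=["hazardous", "outside", "contained"],
    plume_groups=[],
    use_stats=False,
    date_option="last_date",
    statistics_tab_option=StatisticsTabOption.PROBABILITY_PLOT,
    box_show_points="only_points",
)
```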
@@ -77,14 +77,21 @@ def build_mapping(
  def init_map_attribute_names(
      webviz_settings: WebvizSettings,
      ensembles: List[str],
-     mapping: Optional[Dict[str, str]],
+     input_mapping: Optional[Dict[str, str]],
  ) -> FilteredMapAttribute:
-     if mapping is None:
-         # Based on name convention of xtgeoapp_grd3dmaps:
-         mapping = build_mapping(webviz_settings, ensembles)
+     default_mapping = build_mapping(webviz_settings, ensembles)
+     final_mapping = dict(default_mapping)
+     if input_mapping is not None:
+         for key, value in input_mapping.items():
+             if key in final_mapping and final_mapping[key] != value:
+                 LOGGER.info(
+                     f"Conflict on attribute '{key}': prioritizing '{value}' (from input attributes)"
+                     f" over '{final_mapping[key]}' (from default attributes)"
+                 )
+             final_mapping[key] = value
      final_attributes = {
          (MapAttribute[key].value if key in MapAttribute.__members__ else key): value
-         for key, value in mapping.items()
+         for key, value in final_mapping.items()
      }
      return FilteredMapAttribute(final_attributes)
 
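
The reworked `init_map_attribute_names` no longer uses the user-supplied mapping only as a fallback: the defaults from `build_mapping` are always computed, and user-supplied entries are merged on top of them, with a log message whenever an entry overrides a default. A minimal standalone sketch of that merge-with-priority pattern (the function name `merge_with_priority` and the example keys are made up for illustration):

```python
import logging
from typing import Dict, Optional

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)


def merge_with_priority(
    defaults: Dict[str, str], overrides: Optional[Dict[str, str]]
) -> Dict[str, str]:
    # Start from the auto-detected defaults, then let user-supplied entries win on conflicts.
    merged = dict(defaults)
    if overrides is not None:
        for key, value in overrides.items():
            if key in merged and merged[key] != value:
                LOGGER.info(
                    "Conflict on attribute '%s': prioritizing '%s' over '%s'",
                    key,
                    value,
                    merged[key],
                )
            merged[key] = value
    return merged


# Hypothetical attribute names, for illustration only:
print(merge_with_priority({"max_sgas": "max_sgas--"}, {"max_sgas": "sgas_max--"}))
# {'max_sgas': 'sgas_max--'}
```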
@@ -124,7 +131,7 @@ def init_polygon_provider_handlers(
      options: Optional[BoundarySettings],
  ) -> Dict[str, PolygonHandler]:
      filled_options: BoundarySettings = {
-         "polygon_file_pattern": "share/results/polygon/*.csv",
+         "polygon_file_pattern": "share/results/polygons/*.csv",
          "attribute": "boundary",
          "hazardous_name": "hazardous",
          "containment_name": "containment",
@@ -157,8 +164,10 @@ def init_unsmry_data_providers(
 
  def init_containment_data_providers(
      ensemble_roots: Dict[str, str],
-     table_rel_path: str,
+     table_rel_path: Optional[str],
  ) -> Dict[str, ContainmentDataProvider]:
+     if table_rel_path is None:
+         return {}
      factory = EnsembleTableProviderFactory.instance()
      providers = {
          ens: _init_ensemble_table_provider(factory, ens, ens_path, table_rel_path)
@@ -54,6 +54,26 @@ def publish_and_get_surface_metadata(
              *_publish_and_get_truncated_surface_metadata(server, provider, address),
              None,
          )
+     address_map_attribute = next(
+         (
+             key
+             for key, value in map_attribute_names.filtered_values.items()
+             if value == address.attribute
+         ),
+         None,
+     )
+     assert address_map_attribute is not None
+     if MapType[address_map_attribute.name].value == "MIGRATION_TIME" and isinstance(
+         address, StatisticalSurfaceAddress
+     ):
+         return (
+             *_publish_and_get_statistical_time_surface_metadata(
+                 server,
+                 provider,
+                 address,
+             ),
+             None,
+         )
      provider_id: str = provider.provider_id()
      qualified_address = QualifiedSurfaceAddress(provider_id, address)
      surf_meta = server.get_surface_metadata(qualified_address)
@@ -67,15 +87,7 @@ def publish_and_get_surface_metadata(
      if not surface:
          warnings.warn(f"Could not find surface file with properties: {address}")
          return None, None, None
-     address_map_attribute = next(
-         (
-             key
-             for key, value in map_attribute_names.filtered_values.items()
-             if value == address.attribute
-         ),
-         None,
-     )
-     assert address_map_attribute is not None
+
      if MapType[address_map_attribute.name].value == "MASS":
          surface.values = surface.values / SCALE_DICT[visualization_info["unit"]]
          summed_mass = np.ma.sum(surface.values)
@@ -144,3 +156,84 @@ def _generate_surface(
      template.values = plume_count
      template.values.mask = plume_count < 1e-4  # type: ignore
      return template
+
+
+ def _publish_and_get_statistical_time_surface_metadata(
+     server: SurfaceImageServer,
+     provider: EnsembleSurfaceProvider,
+     address: StatisticalSurfaceAddress,
+ ) -> Tuple[Optional[SurfaceImageMeta], str]:
+     qualified_address = QualifiedSurfaceAddress(
+         provider.provider_id(),
+         StatisticalSurfaceAddress(
+             address.attribute,
+             address.name,
+             address.datestr,
+             address.statistic,
+             address.realizations,
+         ),
+     )
+     surf_meta = server.get_surface_metadata(qualified_address)
+     if surf_meta is None:
+         surface = _generate_statisical_time_surface(provider, address)
+         if surface is None:
+             raise ValueError(f"Could not generate surface for address: {address}")
+         server.publish_surface(qualified_address, surface)
+         surf_meta = server.get_surface_metadata(qualified_address)
+     return surf_meta, server.encode_partial_url(qualified_address)
+
+
+ def _generate_statisical_time_surface(
+     provider: EnsembleSurfaceProvider,
+     address: StatisticalSurfaceAddress,
+ ) -> Optional[xtgeo.RegularSurface]:
+     surfaces = [
+         provider.get_surface(
+             SimulatedSurfaceAddress(
+                 attribute=address.attribute,
+                 name=address.name,
+                 datestr=address.datestr,
+                 realization=r,
+             )
+         )
+         for r in address.realizations
+     ]
+     surfaces = [s for s in surfaces if s is not None]
+     if len(surfaces) == 0:
+         return None
+     statistical_map = _statistics_on_time_map(surfaces, address.statistic)
+     if statistical_map is None:
+         return None
+     template: xtgeo.RegularSurface = surfaces[0].copy()  # type: ignore
+     template.values = statistical_map
+     return template
+
+
+ # pylint: disable=too-many-return-statements
+ def _statistics_on_time_map(
+     surfaces: List[xtgeo.RegularSurface],
+     statistic: SurfaceStatistic,
+ ) -> Optional[np.ndarray]:
+     maps = np.zeros((len(surfaces), *surfaces[0].values.shape))
+     for i, surface in enumerate(surfaces):
+         maps[i, :, :] = surface.values
+         masked = np.where(surface.values.mask)
+         maps[i, masked[0], masked[1]] = np.inf
+     if statistic == SurfaceStatistic.MEAN:
+         return _turn_inf_to_nan(np.mean(maps, axis=0))
+     if statistic == SurfaceStatistic.STDDEV:
+         return _turn_inf_to_nan(np.std(maps, axis=0))
+     if statistic == SurfaceStatistic.MINIMUM:
+         return _turn_inf_to_nan(np.min(maps, axis=0))
+     if statistic == SurfaceStatistic.MAXIMUM:
+         return _turn_inf_to_nan(np.max(maps, axis=0))
+     if statistic == SurfaceStatistic.P10:
+         return _turn_inf_to_nan(np.percentile(maps, 10, axis=0))
+     if statistic == SurfaceStatistic.P90:
+         return _turn_inf_to_nan(np.percentile(maps, 90, axis=0))
+     return None
+
+
+ def _turn_inf_to_nan(surface: np.ndarray) -> np.ndarray:
+     surface[np.where(surface == np.inf)] = np.nan
+     return surface
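
The statistics helper stacks the realization surfaces into one 3-D array, replaces each realization's masked (undefined) cells with np.inf before computing the chosen statistic, and `_turn_inf_to_nan` converts any inf left in the result back to NaN. A self-contained numpy sketch of that idea on toy 2x2 "maps" (the data and variable names are made up; the real code operates on xtgeo surface values):

```python
import numpy as np

# Two toy realization maps with one undefined (masked) cell each.
real_a = np.ma.array([[1.0, 2.0], [0.0, 4.0]], mask=[[False, False], [True, False]])
real_b = np.ma.array([[3.0, 2.0], [5.0, 0.0]], mask=[[False, False], [False, True]])

maps = np.zeros((2, *real_a.shape))
for i, surf in enumerate((real_a, real_b)):
    maps[i, :, :] = surf.data
    masked = np.where(surf.mask)
    maps[i, masked[0], masked[1]] = np.inf  # sentinel for "undefined in this realization"

mean_map = np.mean(maps, axis=0)       # inf propagates through the statistic
mean_map[np.isinf(mean_map)] = np.nan  # turn surviving sentinels back into NaN

print(mean_map)
# [[ 2.  2.]
#  [nan nan]]
```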
@@ -2,12 +2,14 @@ from typing import Any, Dict, List
 
  import plotly.graph_objects as go
  import webviz_core_components as wcc
- from dash import html
+ from dash import dcc, html
  from dash.development.base_component import Component
  from webviz_config.utils import StrEnum
  from webviz_config.webviz_plugin_subclasses import ViewABC, ViewElementABC
  from webviz_subsurface_components import SubsurfaceViewer
 
+ from webviz_subsurface.plugins._co2_leakage._types import LegendData
+
 
  class MainView(ViewABC):
      class Ids(StrEnum):
@@ -32,6 +34,7 @@ class MapViewElement(ViewElementABC):
          SIZE_SLIDER = "size-slider"
          TOP_ELEMENT = "top-element"
          BOTTOM_ELEMENT = "bottom-element"
+         LEGEND_DATA_STORE = "legend-data-store"
 
      def __init__(
          self, color_scales: List[Dict[str, Any]], content: Dict[str, bool]
@@ -107,6 +110,16 @@ class MapViewElement(ViewElementABC):
                  ],
              )
          )
+         layout_elements.append(
+             dcc.Store(
+                 id=self.register_component_unique_id(self.Ids.LEGEND_DATA_STORE),
+                 data=LegendData(
+                     bar_legendonly=None,
+                     time_legendonly=None,
+                     stats_legendonly=None,
+                 ),
+             )
+         )
          if self._content["maps"] and self._content["any_table"]:
              layout_elements.append(
                  html.Div(
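
The view now registers a `dcc.Store` whose initial data mirrors `LegendData` from `_co2_leakage/_types.py`, apparently a mapping with the three legend-state fields shown above. A minimal standalone Dash sketch of the same pattern, using a plain dict of that shape since `dcc.Store` data must be JSON-serializable (ids and layout here are illustrative, not from the plugin):

```python
from dash import Dash, dcc, html

app = Dash(__name__)
app.layout = html.Div(
    [
        # Store with the same shape as the plugin's LegendData; a plain dict is used
        # here because this sketch does not import the plugin's type definition.
        dcc.Store(
            id="legend-data-store",
            data={
                "bar_legendonly": None,
                "time_legendonly": None,
                "stats_legendonly": None,
            },
        ),
        html.Div(id="some-output"),
    ]
)

if __name__ == "__main__":
    app.run(debug=True)
```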