webviz-subsurface 0.2.36__py3-none-any.whl → 0.2.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. webviz_subsurface/__init__.py +1 -1
  2. webviz_subsurface/_components/color_picker.py +1 -1
  3. webviz_subsurface/_datainput/well_completions.py +2 -1
  4. webviz_subsurface/_providers/ensemble_polygon_provider/__init__.py +3 -0
  5. webviz_subsurface/_providers/ensemble_polygon_provider/_polygon_discovery.py +97 -0
  6. webviz_subsurface/_providers/ensemble_polygon_provider/_provider_impl_file.py +226 -0
  7. webviz_subsurface/_providers/ensemble_polygon_provider/ensemble_polygon_provider.py +53 -0
  8. webviz_subsurface/_providers/ensemble_polygon_provider/ensemble_polygon_provider_factory.py +99 -0
  9. webviz_subsurface/_providers/ensemble_polygon_provider/polygon_server.py +125 -0
  10. webviz_subsurface/plugins/_co2_leakage/_plugin.py +577 -293
  11. webviz_subsurface/plugins/_co2_leakage/_types.py +7 -0
  12. webviz_subsurface/plugins/_co2_leakage/_utilities/_misc.py +9 -0
  13. webviz_subsurface/plugins/_co2_leakage/_utilities/callbacks.py +226 -186
  14. webviz_subsurface/plugins/_co2_leakage/_utilities/co2volume.py +591 -128
  15. webviz_subsurface/plugins/_co2_leakage/_utilities/containment_data_provider.py +147 -0
  16. webviz_subsurface/plugins/_co2_leakage/_utilities/containment_info.py +31 -0
  17. webviz_subsurface/plugins/_co2_leakage/_utilities/ensemble_well_picks.py +105 -0
  18. webviz_subsurface/plugins/_co2_leakage/_utilities/generic.py +170 -2
  19. webviz_subsurface/plugins/_co2_leakage/_utilities/initialization.py +199 -97
  20. webviz_subsurface/plugins/_co2_leakage/_utilities/polygon_handler.py +60 -0
  21. webviz_subsurface/plugins/_co2_leakage/_utilities/summary_graphs.py +77 -173
  22. webviz_subsurface/plugins/_co2_leakage/_utilities/surface_publishing.py +122 -21
  23. webviz_subsurface/plugins/_co2_leakage/_utilities/unsmry_data_provider.py +108 -0
  24. webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py +44 -19
  25. webviz_subsurface/plugins/_co2_leakage/views/mainview/settings.py +944 -359
  26. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/METADATA +2 -2
  27. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/RECORD +33 -20
  28. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/WHEEL +1 -1
  29. /webviz_subsurface/plugins/_co2_leakage/_utilities/{fault_polygons.py → fault_polygons_handler.py} +0 -0
  30. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/LICENSE +0 -0
  31. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/LICENSE.chromedriver +0 -0
  32. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/entry_points.txt +0 -0
  33. {webviz_subsurface-0.2.36.dist-info → webviz_subsurface-0.2.38.dist-info}/top_level.txt +0 -0
@@ -5,7 +5,7 @@ from typing import Dict, Optional
 import jsonschema
 import webviz_config
 import yaml
-from pkg_resources import DistributionNotFound, get_distribution
+from pkg_resources import DistributionNotFound, get_distribution  # type: ignore

 from webviz_subsurface._utils.user_defined_vector_definitions import (
     USER_DEFINED_VECTOR_DEFINITIONS_JSON_SCHEMA,
@@ -171,7 +171,7 @@ class ColorPicker:
         if not cell:
             raise PreventUpdate
         row_no = cell["row"]
-        return dash_daq.ColorPicker(  # pylint: disable=not-callable
+        return dash_daq.ColorPicker(
             {"id": self._uuid, "element": "picker"},
             label=f"Color for {[col for col in self._dframe.iloc[row_no] if col != 'COLOR']}",
             value={"hex": current_color_store[row_no]},
@@ -15,7 +15,8 @@ import pandas as pd
 # NOTE: Functions in this file cannot be used
 # on non-Linux OSes.
 try:
-    from res2df.resdatafiles import ResdataFiles, common
+    from res2df import common
+    from res2df.resdatafiles import ResdataFiles
 except ImportError:
     pass

@@ -0,0 +1,3 @@
+from .ensemble_polygon_provider import EnsemblePolygonProvider, SimulatedPolygonsAddress
+from .ensemble_polygon_provider_factory import EnsemblePolygonProviderFactory
+from .polygon_server import PolygonsAddress, PolygonServer
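
Together these exports form the public API of the new sub-package: the factory builds a provider per ensemble, the provider serves xtgeo.Polygons per address, and PolygonServer exposes them over HTTP. A minimal sketch of the intended flow, assuming a running webviz app; the ensemble path, glob pattern, and the attribute/name values are all hypothetical:

    from webviz_subsurface._providers.ensemble_polygon_provider import (
        EnsemblePolygonProviderFactory,
        SimulatedPolygonsAddress,
    )

    factory = EnsemblePolygonProviderFactory.instance()
    provider = factory.create_from_ensemble_polygon_files(
        ens_path="/scratch/my_field/realization-*/iter-0",  # hypothetical path
        polygon_path_pattern="share/results/polygons/*.csv",  # hypothetical pattern
    )
    polygons = provider.get_polygons(
        SimulatedPolygonsAddress(attribute="boundary", name="toph", realization=0)
    )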
@@ -0,0 +1,97 @@
+import glob
+import os
+import re
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, List, Optional
+
+# The fmu.ensemble dependency resdata is only available for Linux,
+# hence, ignore any import exception here to make
+# it still possible to use the PvtPlugin on
+# machines with other OSes.
+#
+# NOTE: Functions in this file cannot be used
+# on non-Linux OSes.
+try:
+    from fmu.ensemble import ScratchEnsemble
+except ImportError:
+    pass
+
+
+@dataclass(frozen=True)
+class PolygonsFileInfo:
+    path: str
+    real: int
+    name: str
+    attribute: str
+
+
+def _discover_ensemble_realizations_fmu(ens_path: str) -> Dict[int, str]:
+    """Returns dict indexed by realization number and with runpath as value"""
+    scratch_ensemble = ScratchEnsemble("dummyEnsembleName", paths=ens_path).filter("OK")
+    real_dict = {i: r.runpath() for i, r in scratch_ensemble.realizations.items()}
+    return real_dict
+
+
+def _discover_ensemble_realizations(ens_path: str) -> Dict[int, str]:
+    # Much faster than FMU impl above, but is it risky?
+    # Do we need to check for OK-file?
+    real_dict: Dict[int, str] = {}
+
+    realidxregexp = re.compile(r"realization-(\d+)")
+    globbed_real_dirs = sorted(glob.glob(str(ens_path)))
+    for real_dir in globbed_real_dirs:
+        realnum: Optional[int] = None
+        for path_comp in reversed(real_dir.split(os.path.sep)):
+            realmatch = re.match(realidxregexp, path_comp)
+            if realmatch:
+                realnum = int(realmatch.group(1))
+                break
+
+        if realnum is not None:
+            real_dict[realnum] = real_dir
+
+    return real_dict
+
+
+@dataclass(frozen=True)
+class PolygonsIdent:
+    name: str
+    attribute: str
+
+
+def _polygons_ident_from_filename(filename: str) -> Optional[PolygonsIdent]:
+    """Split the stem part of the fault polygons filename into fault polygons name and attribute"""
+    delimiter: str = "--"
+    parts = Path(filename).stem.split(delimiter)
+    if len(parts) != 2:
+        return None
+
+    return PolygonsIdent(name=parts[0], attribute=parts[1])
+
+
+def discover_per_realization_polygons_files(
+    ens_path: str,
+    polygons_pattern: str,
+) -> List[PolygonsFileInfo]:
+    polygons_files: List[PolygonsFileInfo] = []
+
+    real_dict = _discover_ensemble_realizations_fmu(ens_path)
+    for realnum, runpath in sorted(real_dict.items()):
+        if Path(polygons_pattern).is_absolute():
+            filenames = [polygons_pattern]
+        else:
+            filenames = glob.glob(str(Path(runpath) / polygons_pattern))
+        for polygons_filename in sorted(filenames):
+            polygons_ident = _polygons_ident_from_filename(polygons_filename)
+            if polygons_ident:
+                polygons_files.append(
+                    PolygonsFileInfo(
+                        path=polygons_filename,
+                        real=realnum,
+                        name=polygons_ident.name,
+                        attribute=polygons_ident.attribute,
+                    )
+                )
+
+    return polygons_files
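
Discovery hinges on two filename conventions: realization directories matching realization-<N> somewhere in the run path, and polygon files whose stem splits into exactly two parts, name--attribute, on the "--" delimiter. A quick sketch with hypothetical filenames:

    _polygons_ident_from_filename("share/results/polygons/toph--boundary.csv")
    # -> PolygonsIdent(name="toph", attribute="boundary")

    _polygons_ident_from_filename("share/results/polygons/toph.csv")
    # -> None: no "--" in the stem, so discover_per_realization_polygons_files()
    #    silently skips this file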
@@ -0,0 +1,226 @@
+import logging
+import shutil
+from pathlib import Path
+from typing import List, Optional
+
+import pandas as pd
+import xtgeo
+
+from webviz_subsurface._utils.enum_shim import StrEnum
+from webviz_subsurface._utils.perf_timer import PerfTimer
+
+from ._polygon_discovery import PolygonsFileInfo
+from .ensemble_polygon_provider import (
+    EnsemblePolygonProvider,
+    PolygonsAddress,
+    SimulatedPolygonsAddress,
+)
+
+LOGGER = logging.getLogger(__name__)
+
+REL_SIM_DIR = "sim"
+
+
+# pylint: disable=too-few-public-methods
+class Col:
+    TYPE = "type"
+    REAL = "real"
+    ATTRIBUTE = "attribute"
+    NAME = "name"
+    ORIGINAL_PATH = "original_path"
+    REL_PATH = "rel_path"
+
+
+class PolygonType(StrEnum):
+    SIMULATED = "simulated"
+    HAZARDUOUS_BOUNDARY = "hazarduous_boundary"
+    CONTAINMENT_BOUNDARY = "containment_boundary"
+
+
+class ProviderImplFile(EnsemblePolygonProvider):
+    def __init__(
+        self,
+        provider_id: str,
+        provider_dir: Path,
+        polygon_inventory_df: pd.DataFrame,
+    ) -> None:
+        self._provider_id = provider_id
+        self._provider_dir = provider_dir
+        self._inventory_df = polygon_inventory_df
+
+    @staticmethod
+    # pylint: disable=too-many-locals
+    def write_backing_store(
+        storage_dir: Path,
+        storage_key: str,
+        sim_polygons: List[PolygonsFileInfo],
+    ) -> None:
+        timer = PerfTimer()
+
+        # All data for this provider will be stored inside a sub-directory
+        # given by the storage key
+        provider_dir = storage_dir / storage_key
+        LOGGER.debug(f"Writing polygon backing store to: {provider_dir}")
+        provider_dir.mkdir(parents=True, exist_ok=True)
+        (provider_dir / REL_SIM_DIR).mkdir(parents=True, exist_ok=True)
+
+        type_arr: List[PolygonType] = []
+        real_arr: List[int] = []
+        attribute_arr: List[str] = []
+        name_arr: List[str] = []
+        rel_path_arr: List[str] = []
+        original_path_arr: List[str] = []
+
+        for polygon_info in sim_polygons:
+            rel_path_in_store = _compose_rel_sim_polygons_path(
+                real=polygon_info.real,
+                attribute=polygon_info.attribute,
+                name=polygon_info.name,
+                extension=Path(polygon_info.path).suffix,
+            )
+            type_arr.append(PolygonType.SIMULATED)
+            real_arr.append(polygon_info.real)
+            attribute_arr.append(polygon_info.attribute)
+            name_arr.append(polygon_info.name)
+            rel_path_arr.append(str(rel_path_in_store))
+            original_path_arr.append(polygon_info.path)
+
+        LOGGER.debug(f"Copying {len(original_path_arr)} polygons into backing store...")
+        timer.lap_s()
+        _copy_polygons_into_provider_dir(original_path_arr, rel_path_arr, provider_dir)
+        et_copy_s = timer.lap_s()
+
+        polygons_inventory_df = pd.DataFrame(
+            {
+                Col.TYPE: type_arr,
+                Col.REAL: real_arr,
+                Col.ATTRIBUTE: attribute_arr,
+                Col.NAME: name_arr,
+                Col.REL_PATH: rel_path_arr,
+                Col.ORIGINAL_PATH: original_path_arr,
+            }
+        )
+
+        parquet_file_name = provider_dir / "polygons_inventory.parquet"
+        polygons_inventory_df.to_parquet(path=parquet_file_name)
+
+        LOGGER.debug(
+            f"Wrote polygon backing store in: {timer.elapsed_s():.2f}s ("
+            f"copy={et_copy_s:.2f}s)"
+        )
+
+    @staticmethod
+    def from_backing_store(
+        storage_dir: Path,
+        storage_key: str,
+    ) -> Optional["ProviderImplFile"]:
+        provider_dir = storage_dir / storage_key
+        parquet_file_name = provider_dir / "polygons_inventory.parquet"
+
+        try:
+            polygons_inventory_df = pd.read_parquet(path=parquet_file_name)
+            return ProviderImplFile(storage_key, provider_dir, polygons_inventory_df)
+        except FileNotFoundError:
+            return None
+
+    def provider_id(self) -> str:
+        return self._provider_id
+
+    def attributes(self) -> List[str]:
+        return sorted(list(self._inventory_df[Col.ATTRIBUTE].unique()))
+
+    def fault_polygons_names_for_attribute(self, polygons_attribute: str) -> List[str]:
+        return sorted(
+            list(
+                self._inventory_df.loc[
+                    self._inventory_df[Col.ATTRIBUTE] == polygons_attribute
+                ][Col.NAME].unique()
+            )
+        )
+
+    def realizations(self) -> List[int]:
+        unique_reals = self._inventory_df[Col.REAL].unique()
+
+        # Sort and strip out any entries with real == -1
+        return sorted([r for r in unique_reals if r >= 0])
+
+    def get_polygons(
+        self,
+        address: PolygonsAddress,
+    ) -> Optional[xtgeo.Polygons]:
+        if isinstance(address, SimulatedPolygonsAddress):
+            return self._get_simulated_polygons(address)
+
+        raise TypeError("Unknown type of fault polygons address")
+
+    def _get_simulated_polygons(
+        self, address: SimulatedPolygonsAddress
+    ) -> Optional[xtgeo.Polygons]:
+        """Returns a Xtgeo fault polygons instance of a single realization fault polygons"""
+
+        timer = PerfTimer()
+
+        polygons_fns: List[Path] = self._locate_simulated_polygons(
+            attribute=address.attribute,
+            name=address.name,
+            realizations=[address.realization],
+        )
+
+        if len(polygons_fns) == 0:
+            LOGGER.warning(f"No simulated polygons found for {address}")
+            return None
+        if len(polygons_fns) > 1:
+            LOGGER.warning(
+                f"Multiple simulated polygonss found for: {address}"
+                "Returning first fault polygons."
+            )
+
+        if polygons_fns[0].suffix == ".csv":
+            polygons = xtgeo.Polygons(pd.read_csv(polygons_fns[0]))
+        else:
+            polygons = xtgeo.polygons_from_file(polygons_fns[0])
+
+        LOGGER.debug(f"Loaded simulated fault polygons in: {timer.elapsed_s():.2f}s")
+
+        return polygons
+
+    def _locate_simulated_polygons(
+        self, attribute: str, name: str, realizations: List[int]
+    ) -> List[Path]:
+        """Returns list of file names matching the specified filter criteria"""
+        df = self._inventory_df.loc[
+            self._inventory_df[Col.TYPE] == PolygonType.SIMULATED
+        ]
+
+        df = df.loc[
+            (df[Col.ATTRIBUTE] == attribute)
+            & (df[Col.NAME] == name)
+            & (df[Col.REAL].isin(realizations))
+        ]
+
+        return [self._provider_dir / rel_path for rel_path in df[Col.REL_PATH]]
+
+
+def _copy_polygons_into_provider_dir(
+    original_path_arr: List[str],
+    rel_path_arr: List[str],
+    provider_dir: Path,
+) -> None:
+    for src_path, dst_rel_path in zip(original_path_arr, rel_path_arr):
+        # LOGGER.debug(f"copying fault polygons from: {src_path}")
+        shutil.copyfile(src_path, provider_dir / dst_rel_path)
+
+    # full_dst_path_arr = [storage_dir / dst_rel_path for dst_rel_path in store_path_arr]
+    # with ProcessPoolExecutor() as executor:
+    #     executor.map(shutil.copyfile, original_path_arr, full_dst_path_arr)
+
+
+def _compose_rel_sim_polygons_path(
+    real: int,
+    attribute: str,
+    name: str,
+    extension: str,
+) -> Path:
+    """Compose path to simulated fault polygons file, relative to provider's directory"""
+    fname = f"{real}--{name}--{attribute}{extension}"
+    return Path(REL_SIM_DIR) / fname
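
Note how _compose_rel_sim_polygons_path mirrors the discovery convention: each copied file is renamed to a real--name--attribute stem under the sim/ sub-directory, so the on-disk name carries the same metadata that is recorded in the parquet inventory. For example (hypothetical values):

    _compose_rel_sim_polygons_path(
        real=0, attribute="boundary", name="toph", extension=".csv"
    )
    # -> Path("sim/0--toph--boundary.csv")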
@@ -0,0 +1,53 @@
+import abc
+from dataclasses import dataclass
+from typing import List, Optional
+
+import xtgeo
+
+
+@dataclass(frozen=True)
+class SimulatedPolygonsAddress:
+    """Specifies a unique simulated polygon set for a given ensemble realization"""
+
+    attribute: str
+    name: str
+    realization: int
+
+
+# Type aliases used for signature readability
+PolygonsAddress = SimulatedPolygonsAddress
+
+
+# Class provides data for ensemble surfaces
+class EnsemblePolygonProvider(abc.ABC):
+    @abc.abstractmethod
+    def provider_id(self) -> str:
+        """Returns string ID of the provider."""
+
+    @abc.abstractmethod
+    def attributes(self) -> List[str]:
+        """Returns list of all available attributes."""
+
+    @abc.abstractmethod
+    def realizations(self) -> List[int]:
+        """Returns list of all available realizations."""
+
+    @abc.abstractmethod
+    def get_polygons(
+        self,
+        address: PolygonsAddress,
+    ) -> Optional[xtgeo.Polygons]:
+        """Returns fault polygons for a given fault polygons address"""
+
+    # @abc.abstractmethod
+    # def get_surface_bounds(self, surface: EnsembleSurfaceContext) -> List[float]:
+    #     """Returns the bounds for a surface [xmin,ymin, xmax,ymax]"""
+
+    # @abc.abstractmethod
+    # def get_surface_value_range(self, surface: EnsembleSurfaceContext) -> List[float]:
+    #     """Returns the value range for a given surface context [zmin, zmax]"""
+
+    # @abc.abstractmethod
+    # def get_surface_as_rgba(self, surface: EnsembleSurfaceContext) -> io.BytesIO:
+    #     """Returns surface as a greyscale png RGBA with encoded elevation values
+    #     in a bytestream"""
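
Since SimulatedPolygonsAddress is a frozen (hence hashable) dataclass, a minimal implementation of this ABC can be little more than a dict lookup. A sketch of a hypothetical in-memory test double, not part of the package:

    from typing import Dict, List, Optional

    import xtgeo

    from webviz_subsurface._providers.ensemble_polygon_provider import (
        EnsemblePolygonProvider,
        PolygonsAddress,
    )

    class InMemoryPolygonProvider(EnsemblePolygonProvider):
        def __init__(
            self, provider_id: str, polygons: Dict[PolygonsAddress, xtgeo.Polygons]
        ) -> None:
            self._id = provider_id
            self._polygons = polygons

        def provider_id(self) -> str:
            return self._id

        def attributes(self) -> List[str]:
            return sorted({a.attribute for a in self._polygons})

        def realizations(self) -> List[int]:
            return sorted({a.realization for a in self._polygons})

        def get_polygons(self, address: PolygonsAddress) -> Optional[xtgeo.Polygons]:
            return self._polygons.get(address)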
@@ -0,0 +1,99 @@
+import hashlib
+import logging
+import os
+from pathlib import Path
+
+from webviz_config.webviz_factory import WebvizFactory
+from webviz_config.webviz_factory_registry import WEBVIZ_FACTORY_REGISTRY
+from webviz_config.webviz_instance_info import WebvizRunMode
+
+from webviz_subsurface._utils.perf_timer import PerfTimer
+
+from ._polygon_discovery import discover_per_realization_polygons_files
+from ._provider_impl_file import ProviderImplFile
+from .ensemble_polygon_provider import EnsemblePolygonProvider
+
+LOGGER = logging.getLogger(__name__)
+
+
+class EnsemblePolygonProviderFactory(WebvizFactory):
+    def __init__(self, root_storage_folder: Path, allow_storage_writes: bool) -> None:
+        self._storage_dir = Path(root_storage_folder) / __name__
+        self._allow_storage_writes = allow_storage_writes
+
+        LOGGER.info(
+            f"EnsemblePolygonProviderFactory init: storage_dir={self._storage_dir}"
+        )
+
+        if self._allow_storage_writes:
+            os.makedirs(self._storage_dir, exist_ok=True)
+
+    @staticmethod
+    def instance() -> "EnsemblePolygonProviderFactory":
+        """Static method to access the singleton instance of the factory."""
+
+        factory = WEBVIZ_FACTORY_REGISTRY.get_factory(EnsemblePolygonProviderFactory)
+        if not factory:
+            app_instance_info = WEBVIZ_FACTORY_REGISTRY.app_instance_info
+            storage_folder = app_instance_info.storage_folder
+            allow_writes = app_instance_info.run_mode != WebvizRunMode.PORTABLE
+
+            factory = EnsemblePolygonProviderFactory(storage_folder, allow_writes)
+
+            # Store the factory object in the global factory registry
+            WEBVIZ_FACTORY_REGISTRY.set_factory(EnsemblePolygonProviderFactory, factory)
+
+        return factory
+
+    def create_from_ensemble_polygon_files(
+        self,
+        ens_path: str,
+        polygon_path_pattern: str,
+    ) -> EnsemblePolygonProvider:
+        timer = PerfTimer()
+
+        storage_key = f"ens__{_make_hash_string(ens_path)}"
+        provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key)
+        if provider:
+            LOGGER.info(
+                f"Loaded polygon provider from backing store in {timer.elapsed_s():.2f}s ("
+                f"ens_path={ens_path})"
+            )
+            return provider
+
+        # We can only import data from data source if storage writes are allowed
+        if not self._allow_storage_writes:
+            raise ValueError(f"Failed to load polygon provider for {ens_path}")
+
+        LOGGER.info(f"Importing/copying polygon data for: {ens_path}")
+
+        timer.lap_s()
+        sim_polygons_files = discover_per_realization_polygons_files(
+            ens_path,
+            polygon_path_pattern,
+        )
+
+        et_discover_s = timer.lap_s()
+
+        ProviderImplFile.write_backing_store(
+            self._storage_dir,
+            storage_key,
+            sim_polygons=sim_polygons_files,
+        )
+        et_write_s = timer.lap_s()
+
+        provider = ProviderImplFile.from_backing_store(self._storage_dir, storage_key)
+        if not provider:
+            raise ValueError(f"Failed to load/create polygon provider for {ens_path}")
+
+        LOGGER.info(
+            f"Saved polygon provider to backing store in {timer.elapsed_s():.2f}s ("
+            f"discover={et_discover_s:.2f}s, write={et_write_s:.2f}s, ens_path={ens_path})"
+        )
+
+        return provider
+
+
+def _make_hash_string(string_to_hash: str) -> str:
+    # There is no security risk here and chances of collision should be very slim
+    return hashlib.md5(string_to_hash.encode()).hexdigest()  # nosec
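
The storage key pins a backing-store directory to an ensemble path via an md5 digest, which is what lets a later run find and reuse the cached copy instead of re-discovering and re-copying files. Roughly (the ensemble path is hypothetical):

    import hashlib

    ens_path = "/scratch/my_field/realization-*/iter-0"  # hypothetical
    storage_key = f"ens__{hashlib.md5(ens_path.encode()).hexdigest()}"
    # e.g. "ens__<32 hex chars>"; the provider data then lives under
    # <root_storage_folder>/<factory module __name__>/<storage_key>/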
@@ -0,0 +1,125 @@
+import json
+import logging
+from dataclasses import asdict, dataclass
+from typing import Dict, Optional
+from urllib.parse import quote
+
+import flask
+import geojson
+import xtgeo
+from dash import Dash
+
+from .ensemble_polygon_provider import EnsemblePolygonProvider, PolygonsAddress
+
+LOGGER = logging.getLogger(__name__)
+
+_ROOT_URL_PATH = "/PolygonServer"
+
+_POLYGONS_SERVER_INSTANCE: Optional["PolygonServer"] = None
+
+
+@dataclass(frozen=True)
+class QualifiedAddress:
+    provider_id: str
+    address: PolygonsAddress
+
+
+class PolygonServer:
+    def __init__(self, app: Dash) -> None:
+        self._setup_url_rule(app)
+        self._id_to_provider_dict: Dict[str, EnsemblePolygonProvider] = {}
+
+    @staticmethod
+    def instance(app: Dash) -> "PolygonServer":
+        # pylint: disable=global-statement
+        global _POLYGONS_SERVER_INSTANCE
+        if not _POLYGONS_SERVER_INSTANCE:
+            LOGGER.debug("Initializing PolygonServer instance")
+            _POLYGONS_SERVER_INSTANCE = PolygonServer(app)
+
+        return _POLYGONS_SERVER_INSTANCE
+
+    def add_provider(self, provider: EnsemblePolygonProvider) -> None:
+        provider_id = provider.provider_id()
+        LOGGER.debug(f"Adding provider with id={provider_id}")
+
+        existing_provider = self._id_to_provider_dict.get(provider_id)
+        if existing_provider:
+            # Issue a warning if there already is a provider registered with the same
+            # id AND if the actual provider instance is different.
+            # This should not be a problem, but will happen until the provider factory
+            # gets caching.
+            if existing_provider is not provider:
+                LOGGER.warning(
+                    f"Provider with id={provider_id} ignored, the id is already present"
+                )
+                return
+
+        self._id_to_provider_dict[provider_id] = provider
+
+    def encode_partial_url(
+        self,
+        provider_id: str,
+        polygons_address: PolygonsAddress,
+    ) -> str:
+        if not provider_id in self._id_to_provider_dict:
+            raise ValueError("Could not find provider")
+
+        url_path: str = (
+            f"{_ROOT_URL_PATH}/{quote(provider_id)}"
+            f"/{quote(json.dumps(asdict(polygons_address)))}"
+        )
+
+        return url_path
+
+    def _setup_url_rule(self, app: Dash) -> None:
+        @app.server.route(_ROOT_URL_PATH + "/<provider_id>/<polygons_address>")
+        def _handle_polygons_request(
+            provider_id: str,
+            polygons_address: str,
+        ) -> flask.Response:
+            LOGGER.debug(
+                f"Handling polygons_request: "
+                f"full_polygons_address={polygons_address} "
+            )
+
+            polygons_geojson = None
+            # try:
+
+            address = PolygonsAddress(**json.loads(polygons_address))
+            provider = self._id_to_provider_dict[provider_id]
+            polygons = provider.get_polygons(address)
+            if polygons is not None:
+                polygons_geojson = _create_polygons_geojson(
+                    polygons=polygons,
+                )
+
+            # except Exception as e:
+            #     LOGGER.error("Error decoding polygons address")
+            #     print(e)
+            #     # flask.abort(404)
+            featurecoll = (
+                polygons_geojson
+                if polygons_geojson is not None
+                else {
+                    "type": "FeatureCollection",
+                    "features": [],
+                }
+            )
+
+            return flask.Response(
+                geojson.dumps(featurecoll), mimetype="application/geo+json"
+            )
+
+
+def _create_polygons_geojson(polygons: xtgeo.Polygons) -> Dict:
+    feature_arr = []
+    prop_style = {"color": [0, 0, 0, 255]}
+    for name, polygon in polygons.dataframe.groupby("POLY_ID"):
+        coords = [list(zip(polygon.X_UTME, polygon.Y_UTMN))]
+        feature = geojson.Feature(
+            geometry=geojson.Polygon(coords),
+            properties={"name": f"id:{name}", **prop_style},
+        )
+        feature_arr.append(feature)
+    return geojson.FeatureCollection(features=feature_arr)
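
encode_partial_url and the Flask route are two halves of one scheme: the provider id and the JSON-serialized address are percent-encoded into the URL path, and _handle_polygons_request rebuilds the address from the path segment (which Flask has already URL-decoded). A round-trip sketch with a hypothetical provider id:

    import json
    from dataclasses import asdict
    from urllib.parse import quote, unquote

    from webviz_subsurface._providers.ensemble_polygon_provider import (
        SimulatedPolygonsAddress,
    )

    address = SimulatedPolygonsAddress(attribute="boundary", name="toph", realization=0)
    partial_url = (
        f"/PolygonServer/{quote('my_provider')}"
        f"/{quote(json.dumps(asdict(address)))}"
    )

    # The route handler effectively does the reverse on the decoded segment:
    decoded = SimulatedPolygonsAddress(
        **json.loads(unquote(partial_url.rsplit("/", 1)[-1]))
    )
    assert decoded == address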