fmu-sumo: 2.3.7 → 2.3.9 (py3-none-any.whl)

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
@@ -1,19 +1,28 @@
+ from __future__ import annotations
+
+ import json
  import uuid
  import warnings
  from datetime import datetime
  from io import BytesIO
- from typing import Dict, List, Tuple
+ from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Union

  import deprecation
  import httpx
- from sumo.wrapper import SumoClient

- import fmu.sumo.explorer.objects as objects
+ from fmu.sumo.explorer import objects
  from fmu.sumo.explorer.cache import LRUCache

+ if TYPE_CHECKING:
+ from sumo.wrapper import SumoClient
+
+
+ # Type aliases
+ SelectArg = Union[bool, str, Dict[str, Union[str, List[str]]], List[str]]
+

  def _gen_filter_none():
- def _fn(value):
+ def _fn(_):
  return None, None

  return _fn
@@ -128,7 +137,11 @@ _filterspec = {
  "relative_path": [_gen_filter_gen, "file.relative_path.keyword"],
  "tagname": [_gen_filter_gen, "data.tagname.keyword"],
  "dataformat": [_gen_filter_gen, "data.format.keyword"],
- "iteration": [_gen_filter_gen, "fmu.iteration.name.keyword"],
+ "iteration": [
+ _gen_filter_gen,
+ "fmu.iteration.name.keyword",
+ ], # FIXME: to be removed
+ "ensemble": [_gen_filter_gen, "fmu.ensemble.name.keyword"],
  "realization": [_gen_filter_gen, "fmu.realization.id"],
  "aggregation": [_gen_filter_gen, "fmu.aggregation.operation.keyword"],
  "stage": [_gen_filter_gen, "fmu.context.stage.keyword"],
@@ -142,6 +155,7 @@ _filterspec = {
  "stratigraphic": [_gen_filter_bool, "data.stratigraphic"],
  "is_observation": [_gen_filter_bool, "data.is_observation"],
  "is_prediction": [_gen_filter_bool, "data.is_prediction"],
+ "standard_result": [_gen_filter_gen, "data.standard_result.name.keyword"],
  "complex": [_gen_filter_complex, None],
  "has": [_gen_filter_none, None],
  }
@@ -211,7 +225,7 @@ class Pit:
  self._id = res.json()["id"]
  return self

- def __exit__(self, exc_type, exc_value, traceback):
+ def __exit__(self, *_):
  if self._id is not None:
  self._sumo.delete("/pit", params={"id": self._id})
  pass
@@ -224,7 +238,7 @@ class Pit:
  self._id = res.json()["id"]
  return self

- async def __aexit__(self, exc_type, exc_value, traceback):
+ async def __aexit__(self, *_):
  if self._id is not None:
  await self._sumo.delete_async("/pit", params={"id": self._id})
  pass
@@ -254,14 +268,34 @@ class SearchContext:
  self._visible = visible
  self._hidden = hidden
  self._field_values = {}
+ self._field_values_and_counts = {}
  self._hits = None
  self._cache = LRUCache(capacity=200)
  self._length = None
- self._select = {
+ self._select: SelectArg = {
  "excludes": ["fmu.realization.parameters"],
  }
  return

+ def __str__(self):
+ length = len(self)
+ if length == 0:
+ return "None"
+ else:
+ preview = [self[i].metadata for i in range(min(5, length))]
+ return f"Data Preview:\n{json.dumps(preview, indent=4)}"
+
+ def __repr__(self):
+ cls = self.__class__.__name__
+ length = len(self)
+ if length == 0:
+ return f"<{cls}: {length} objects>"
+ else:
+ if len(self.classes) == 1:
+ return f"<{cls}: {length} objects of type {self.classes[0]}>"
+ else:
+ return f"<{cls}: {length} objects of types {self.classes}>"
+
  @property
  def _query(self):
  must = self._must[:]
@@ -282,7 +316,7 @@ class SearchContext:
  else:
  return {"bool": {"must": must, "must_not": must_not}}

- def _to_sumo(self, obj, blob=None):
+ def _to_sumo(self, obj, blob=None) -> objects.Document:
  cls = obj["_source"]["class"]
  if cls == "case":
  return objects.Case(self._sumo, obj)
@@ -295,6 +329,9 @@ class SearchContext:
  "table": objects.Table,
  "cpgrid": objects.CPGrid,
  "cpgrid_property": objects.CPGridProperty,
+ "iteration": objects.Iteration, # FIXME: to be removed
+ "ensemble": objects.Ensemble,
+ "realization": objects.Realization,
  }.get(cls)
  if constructor is None:
  warnings.warn(f"No constructor for class {cls}")
@@ -319,7 +356,7 @@ class SearchContext:
  self._length = res["count"]
  return self._length

- def __search_all(self, query, size=1000, select=False):
+ def __search_all(self, query, size: int = 1000, select: SelectArg = False):
  all_hits = []
  query = {
  "query": query,
@@ -356,10 +393,12 @@ class SearchContext:
  pass
  return all_hits

- def _search_all(self, select=False):
+ def _search_all(self, select: SelectArg = False):
  return self.__search_all(query=self._query, size=1000, select=select)

- async def __search_all_async(self, query, size=1000, select=False):
+ async def __search_all_async(
+ self, query, size: int = 1000, select: SelectArg = False
+ ):
  all_hits = []
  query = {
  "query": query,
@@ -398,7 +437,7 @@ class SearchContext:
  pass
  return all_hits

- async def _search_all_async(self, select=False):
+ async def _search_all_async(self, select: SelectArg = False):
  return await self.__search_all_async(
  query=self._query, size=1000, select=select
  )
@@ -477,7 +516,7 @@ class SearchContext:
  assert await self.length_async() == 1
  return await self.getitem_async(0)

- def select(self, sel):
+ def select(self, sel) -> SearchContext:
  """Specify what should be returned from elasticsearch.
  Has the side effect of clearing the lru cache.
  sel is either a single string value, a list of string value,
@@ -521,7 +560,7 @@ class SearchContext:
  self._cache.clear()
  return self

- def get_object(self, uuid: str) -> Dict:
+ def get_object(self, uuid: str) -> objects.Document:
  """Get metadata object by uuid

  Args:
@@ -549,7 +588,7 @@ class SearchContext:

  return self._to_sumo(obj)

- async def get_object_async(self, uuid: str) -> Dict:
+ async def get_object_async(self, uuid: str) -> objects.Document:
  """Get metadata object by uuid

  Args:
@@ -579,6 +618,7 @@ class SearchContext:
  return self._to_sumo(obj)

  def _maybe_prefetch(self, index):
+ assert isinstance(self._hits, list)
  uuid = self._hits[index]
  if self._cache.has(uuid):
  return
@@ -595,6 +635,7 @@ class SearchContext:
  return

  async def _maybe_prefetch_async(self, index):
+ assert isinstance(self._hits, list)
  uuid = self._hits[index]
  if self._cache.has(uuid):
  return
@@ -613,13 +654,13 @@ class SearchContext:
  def get_objects(
  self,
  uuids: List[str],
- select: List[str] = None,
+ select: SelectArg,
  ) -> List[Dict]:
  size = (
  1000
  if select is False
  else 100
- if isinstance(select, list)
+ if isinstance(select, (list, dict))
  else 10
  )
  return self.__search_all(
@@ -627,15 +668,13 @@ class SearchContext:
  )

  async def get_objects_async(
- self,
- uuids: List[str],
- select: List[str] = None,
+ self, uuids: List[str], select: SelectArg
  ) -> List[Dict]:
  size = (
  1000
  if select is False
  else 100
- if isinstance(select, list)
+ if isinstance(select, (list, dict))
  else 10
  )
  return await self.__search_all_async(
@@ -763,6 +802,23 @@ class SearchContext:

  return all_buckets

+ def get_field_values_and_counts(self, field: str) -> Dict[str, int]:
+ """Get List of unique values with occurrence counts for a given field
+
+ Arguments:
+ - field (str): a metadata field
+
+ Returns:
+ A mapping from unique values to count.
+ """
+ if field not in self._field_values_and_counts:
+ buckets = {
+ b["key"]: b["doc_count"] for b in self._get_buckets(field)
+ }
+ self._field_values_and_counts[field] = buckets
+
+ return self._field_values_and_counts[field]
+
  def get_field_values(self, field: str) -> List:
  """Get List of unique values for a given field

@@ -792,6 +848,26 @@ class SearchContext:
  """
  return self.get_field_values(field)

+ async def get_field_values_and_counts_async(
+ self, field: str
+ ) -> Dict[str, int]:
+ """Get List of unique values with occurrence counts for a given field
+
+ Arguments:
+ - field (str): a metadata field
+
+ Returns:
+ A mapping from unique values to count.
+ """
+ if field not in self._field_values_and_counts:
+ buckets = {
+ b["key"]: b["doc_count"]
+ for b in await self._get_buckets_async(field)
+ }
+ self._field_values_and_counts[field] = buckets
+
+ return self._field_values_and_counts[field]
+
  async def get_field_values_async(self, field: str) -> List:
  """Get List of unique values for a given field

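Usage note: the get_field_values_and_counts / get_field_values_and_counts_async helpers added above return a mapping from each unique value of a metadata field to its document count, built from the same aggregation buckets as get_field_values and cached per field. A minimal sketch of how this might be called — the Explorer setup, environment name, case uuid and field name below are illustrative assumptions, not part of this diff::

    from fmu.sumo.explorer import Explorer

    # Assumed setup: environment name and case uuid are placeholders.
    exp = Explorer(env="dev")
    case = exp.get_case_by_uuid("11111111-2222-3333-4444-555555555555")

    # value -> number of matching documents; the result is cached on the
    # search context per field.
    counts = case.surfaces.get_field_values_and_counts("data.content.keyword")
    for value, n in counts.items():
        print(f"{value}: {n} objects")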
@@ -874,17 +950,33 @@ class SearchContext:
  return objects.Cases(self, uuids)

  @property
+ @deprecation.deprecated(details="Use the method 'ensembles' instead.")
  def iterations(self):
  """Iterations from current selection."""
  uuids = self.get_field_values("fmu.iteration.uuid.keyword")
  return objects.Iterations(self, uuids)

  @property
+ @deprecation.deprecated(
+ details="Use the method 'ensembles_async' instead."
+ )
  async def iterations_async(self):
  """Iterations from current selection."""
  uuids = await self.get_field_values_async("fmu.iteration.uuid.keyword")
  return objects.Iterations(self, uuids)

+ @property
+ def ensembles(self):
+ """Ensembles from current selection."""
+ uuids = self.get_field_values("fmu.ensemble.uuid.keyword")
+ return objects.Ensembles(self, uuids)
+
+ @property
+ async def ensembles_async(self):
+ """Ensembles from current selection."""
+ uuids = await self.get_field_values_async("fmu.ensemble.uuid.keyword")
+ return objects.Ensembles(self, uuids)
+
  @property
  def realizations(self):
  """Realizations from current selection."""
@@ -900,8 +992,8 @@ class SearchContext:
  return objects.Realizations(self, uuids)

  @property
- def template_paths(search_context): # noqa: N805
- return {obj.template_path for obj in search_context}
+ def template_paths(self) -> List[str]:
+ return {obj.template_path for obj in self}

  @property
  def metrics(self):
@@ -987,7 +1079,6 @@ class SearchContext:
  f = filters.get(k)
  if f is None:
  raise Exception(f"Don't know how to generate filter for {k}")
- pass
  _must, _must_not = f(v)
  if _must:
  must.append(_must)
@@ -1023,46 +1114,46 @@ class SearchContext:
  return sc

  @property
- def surfaces(self):
+ def surfaces(self) -> SearchContext:
  return self._context_for_class("surface")

  @property
- def tables(self):
+ def tables(self) -> SearchContext:
  return self._context_for_class("table")

  @property
- def cubes(self):
+ def cubes(self) -> SearchContext:
  return self._context_for_class("cube")

  @property
- def polygons(self):
+ def polygons(self) -> SearchContext:
  return self._context_for_class("polygons")

  @property
- def dictionaries(self):
+ def dictionaries(self) -> SearchContext:
  return self._context_for_class("dictionary")

  @property
- def grids(self):
+ def grids(self) -> SearchContext:
  return self._context_for_class("cpgrid")

  @property
- def grid_properties(self):
+ def grid_properties(self) -> SearchContext:
  return self._context_for_class("cpgrid_property")

- def _get_object_by_class_and_uuid(self, cls, uuid):
+ def _get_object_by_class_and_uuid(self, cls, uuid) -> Any:
  obj = self.get_object(uuid)
  if obj.metadata["class"] != cls:
  raise Exception(f"Document of type {cls} not found: {uuid}")
  return obj

- async def _get_object_by_class_and_uuid_async(self, cls, uuid):
+ async def _get_object_by_class_and_uuid_async(self, cls, uuid) -> Any:
  obj = await self.get_object_async(uuid)
  if obj.metadata["class"] != cls:
  raise Exception(f"Document of type {cls} not found: {uuid}")
  return obj

- def get_case_by_uuid(self, uuid: str):
+ def get_case_by_uuid(self, uuid: str) -> objects.Case:
  """Get case object by uuid

  Args:
@@ -1073,7 +1164,7 @@ class SearchContext:
  """
  return self._get_object_by_class_and_uuid("case", uuid)

- async def get_case_by_uuid_async(self, uuid: str):
+ async def get_case_by_uuid_async(self, uuid: str) -> objects.Case:
  """Get case object by uuid

  Args:
@@ -1091,6 +1182,7 @@ class SearchContext:
  "_source": {
  "includes": [
  "$schema",
+ "class",
  "source",
  "version",
  "access",
@@ -1101,7 +1193,7 @@ class SearchContext:
  },
  }

- def get_iteration_by_uuid(self, uuid: str):
+ def get_iteration_by_uuid(self, uuid: str) -> objects.Iteration:
  """Get iteration object by uuid

  Args:
@@ -1109,14 +1201,27 @@ class SearchContext:

  Returns: iteration object
  """
- res = self._sumo.post(
- "/search", json=self._iteration_query(uuid)
- ).json()
- obj = res["hits"]["hits"][0]
- obj["_id"] = uuid
- return objects.Iteration(self._sumo, obj)
-
- async def get_iteration_by_uuid_async(self, uuid: str):
+ try:
+ obj = self.get_object(uuid)
+ assert isinstance(obj, objects.Iteration)
+ return obj
+ except Exception:
+ res = self._sumo.post(
+ "/search", json=self._iteration_query(uuid)
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "iteration"
+ ret = self._to_sumo(obj)
+ self._cache.put(uuid, ret)
+ return ret
+
+ async def get_iteration_by_uuid_async(
+ self, uuid: str
+ ) -> objects.Iteration:
  """Get iteration object by uuid

  Args:
@@ -1124,16 +1229,99 @@ class SearchContext:

  Returns: iteration object
  """
- res = (
- await self._sumo.post_async(
- "/search", json=self._iteration_query(uuid)
- )
- ).json()
- obj = res["hits"]["hits"][0]
- obj["_id"] = uuid
- return objects.Iteration(self._sumo, obj)
+ try:
+ obj = await self.get_object_async(uuid)
+ assert isinstance(obj, objects.Iteration)
+ return obj
+ except Exception:
+ res = (
+ await self._sumo.post_async(
+ "/search", json=self._iteration_query(uuid)
+ )
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "iteration"
+ ret = self._to_sumo(obj)
+ self._cache.put(uuid, ret)
+ return ret
+
+ def _ensemble_query(self, uuid):
+ return {
+ "query": {"term": {"fmu.ensemble.uuid.keyword": {"value": uuid}}},
+ "size": 1,
+ "_source": {
+ "includes": [
+ "$schema",
+ "class",
+ "source",
+ "version",
+ "access",
+ "masterdata",
+ "fmu.case",
+ "fmu.ensemble",
+ ],
+ },
+ }
+
+ def get_ensemble_by_uuid(self, uuid: str) -> objects.Ensemble:
+ """Get ensemble object by uuid
+
+ Args:
+ uuid (str): ensemble uuid
+
+ Returns: ensemble object
+ """
+ try:
+ obj = self.get_object(uuid)
+ assert isinstance(obj, objects.Ensemble)
+ return obj
+ except Exception:
+ res = self._sumo.post(
+ "/search", json=self._ensemble_query(uuid)
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "ensemble"
+ ret = self._to_sumo(obj)
+ self._cache.put(uuid, ret)
+ return ret
+
+ async def get_ensemble_by_uuid_async(self, uuid: str) -> objects.Ensemble:
+ """Get ensemble object by uuid

- def _realization_query(self, uuid):
+ Args:
+ uuid (str): ensemble uuid
+
+ Returns: ensemble object
+ """
+ try:
+ obj = await self.get_object_async(uuid)
+ assert isinstance(obj, objects.Ensemble)
+ return obj
+ except Exception:
+ res = (
+ await self._sumo.post_async(
+ "/search", json=self._ensemble_query(uuid)
+ )
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "ensemble"
+ ret = self._to_sumo(obj)
+ self._cache.put(uuid, ret)
+ return ret
+
+ def _realization_query(self, uuid) -> Dict:
  return {
  "query": {
  "term": {"fmu.realization.uuid.keyword": {"value": uuid}}
@@ -1142,18 +1330,20 @@ class SearchContext:
  "_source": {
  "includes": [
  "$schema",
+ "class",
  "source",
  "version",
  "access",
  "masterdata",
  "fmu.case",
  "fmu.iteration",
+ "fmu.ensemble",
  "fmu.realization",
  ],
  },
  }

- def get_realization_by_uuid(self, uuid: str):
+ def get_realization_by_uuid(self, uuid: str) -> objects.Realization:
  """Get realization object by uuid

  Args:
@@ -1161,14 +1351,25 @@ class SearchContext:

  Returns: realization object
  """
- res = self._sumo.post(
- "/search", json=self._realization_query(uuid)
- ).json()
- obj = res["hits"]["hits"][0]
- obj["_id"] = uuid
- return objects.Realization(self._sumo, obj)
+ try:
+ obj = self.get_object(uuid)
+ assert isinstance(obj, objects.Realization)
+ return obj
+ except Exception:
+ res = self._sumo.post(
+ "/search", json=self._realization_query(uuid)
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "realization"
+ return self._to_sumo(obj)

- async def get_realization_by_uuid_async(self, uuid: str):
+ async def get_realization_by_uuid_async(
+ self, uuid: str
+ ) -> objects.Realization:
  """Get realization object by uuid

  Args:
@@ -1176,16 +1377,25 @@ class SearchContext:

  Returns: realization object
  """
- res = (
- await self._sumo.post_async(
- "/search", json=self._realization_query(uuid)
- )
- ).json()
- obj = res["hits"]["hits"][0]
- obj["_id"] = uuid
- return objects.Realization(self._sumo, obj)
+ try:
+ obj = await self.get_object_async(uuid)
+ assert isinstance(obj, objects.Realization)
+ return obj
+ except Exception:
+ res = (
+ await self._sumo.post_async(
+ "/search", json=self._realization_query(uuid)
+ )
+ ).json()
+ hits = res["hits"]["hits"]
+ if len(hits) == 0:
+ raise Exception(f"Document not found: {uuid}")
+ obj = hits[0]
+ obj["_id"] = uuid
+ obj["_source"]["class"] = "realization"
+ return self._to_sumo(obj)

- def get_surface_by_uuid(self, uuid: str):
+ def get_surface_by_uuid(self, uuid: str) -> objects.Surface:
  """Get surface object by uuid

  Args:
@@ -1196,7 +1406,7 @@ class SearchContext:
  """
  return self._get_object_by_class_and_uuid("surface", uuid)

- async def get_surface_by_uuid_async(self, uuid: str):
+ async def get_surface_by_uuid_async(self, uuid: str) -> objects.Surface:
  """Get surface object by uuid

  Args:
@@ -1207,7 +1417,7 @@ class SearchContext:
  """
  return await self._get_object_by_class_and_uuid_async("surface", uuid)

- def get_polygons_by_uuid(self, uuid: str):
+ def get_polygons_by_uuid(self, uuid: str) -> objects.Polygons:
  """Get polygons object by uuid

  Args:
@@ -1218,7 +1428,7 @@ class SearchContext:
  """
  return self._get_object_by_class_and_uuid("polygons", uuid)

- async def get_polygons_by_uuid_async(self, uuid: str):
+ async def get_polygons_by_uuid_async(self, uuid: str) -> objects.Polygons:
  """Get polygons object by uuid

  Args:
@@ -1229,7 +1439,7 @@ class SearchContext:
  """
  return await self._get_object_by_class_and_uuid_async("polygons", uuid)

- def get_table_by_uuid(self, uuid: str):
+ def get_table_by_uuid(self, uuid: str) -> objects.Table:
  """Get table object by uuid

  Args:
@@ -1240,7 +1450,7 @@ class SearchContext:
  """
  return self._get_object_by_class_and_uuid("table", uuid)

- async def get_table_by_uuid_async(self, uuid: str):
+ async def get_table_by_uuid_async(self, uuid: str) -> objects.Table:
  """Get table object by uuid

  Args:
@@ -1251,7 +1461,9 @@ class SearchContext:
  """
  return await self._get_object_by_class_and_uuid_async("table", uuid)

- def _verify_aggregation_operation(self):
+ def _verify_aggregation_operation(
+ self,
+ ) -> Tuple[Dict, List[str], List[int]]:
  query = {
  "query": self._query,
  "size": 1,
@@ -1261,7 +1473,7 @@ class SearchContext:
  for k in [
  "fmu.case.uuid",
  "class",
- "fmu.iteration.name",
+ "fmu.ensemble.name",
  "data.name",
  "data.tagname",
  "data.content",
@@ -1269,6 +1481,8 @@ class SearchContext:
  },
  }
  sres = self._sumo.post("/search", json=query).json()
+ if len(sres["hits"]["hits"]) == 0:
+ raise Exception("No matching realizations found.")
  prototype = sres["hits"]["hits"][0]
  conflicts = [
  k
@@ -1291,8 +1505,15 @@ class SearchContext:
  rids = [hit["_source"]["fmu"]["realization"]["id"] for hit in hits]
  return prototype, uuids, rids

- def _aggregate(self, columns=None, operation=None):
- prototype, uuids, rids = self._verify_aggregation_operation()
+ def _aggregate(self, columns=None, operation=None) -> objects.Child:
+ assert (
+ operation != "collection"
+ or columns is not None
+ and len(columns) == 1
+ ), "Exactly one column required for collection aggregation."
+ prototype, uuids, rids = self.filter(
+ column=columns
+ )._verify_aggregation_operation()
  spec = {
  "object_ids": uuids,
  "operations": [operation],
@@ -1300,9 +1521,8 @@ class SearchContext:
  del prototype["_source"]["fmu"]["realization"]
  del prototype["_source"]["_sumo"]
  del prototype["_source"]["file"]
- del prototype["_source"]["access"]
  if "context" in prototype["_source"]["fmu"]:
- prototype["_source"]["fmu"]["context"]["stage"] = "iteration"
+ prototype["_source"]["fmu"]["context"]["stage"] = "ensemble"
  pass
  prototype["_source"]["fmu"]["aggregation"] = {
  "id": str(uuid.uuid4()),
@@ -1330,10 +1550,11 @@ class SearchContext:
  raise ex
  blob = BytesIO(res.content)
  res = self._to_sumo(prototype, blob)
+ assert isinstance(res, objects.Child)
  res._blob = blob
  return res

- def aggregate(self, columns=None, operation=None):
+ def aggregate(self, columns=None, operation=None) -> objects.Child:
  if len(self.hidden) > 0:
  return self.hidden._aggregate(columns=columns, operation=operation)
  else:
@@ -1341,7 +1562,9 @@ class SearchContext:
  columns=columns, operation=operation
  )

- async def _verify_aggregation_operation_async(self):
+ async def _verify_aggregation_operation_async(
+ self,
+ ) -> Tuple[Dict, List[str], List[int]]:
  query = {
  "query": self._query,
  "size": 1,
@@ -1351,7 +1574,7 @@ class SearchContext:
  for k in [
  "fmu.case.uuid",
  "class",
- "fmu.iteration.name",
+ "fmu.ensemble.name",
  "data.name",
  "data.tagname",
  "data.content",
@@ -1359,6 +1582,8 @@ class SearchContext:
  },
  }
  sres = (await self._sumo.post_async("/search", json=query)).json()
+ if len(sres["hits"]["hits"]) == 0:
+ raise Exception("No matching realizations found.")
  prototype = sres["hits"]["hits"][0]
  conflicts = [
  k
@@ -1381,12 +1606,21 @@ class SearchContext:
  rids = [hit["_source"]["fmu"]["realization"]["id"] for hit in hits]
  return prototype, uuids, rids

- async def _aggregate_async(self, columns=None, operation=None):
+ async def _aggregate_async(
+ self, columns=None, operation=None
+ ) -> objects.Child:
+ assert (
+ operation != "collection"
+ or columns is not None
+ and len(columns) == 1
+ ), "Exactly one column required for collection aggregation."
  (
  prototype,
  uuids,
  rids,
- ) = await self._verify_aggregation_operation_async()
+ ) = await self.filter(
+ column=columns
+ )._verify_aggregation_operation_async()
  spec = {
  "object_ids": uuids,
  "operations": [operation],
@@ -1394,9 +1628,8 @@ class SearchContext:
  del prototype["_source"]["fmu"]["realization"]
  del prototype["_source"]["_sumo"]
  del prototype["_source"]["file"]
- del prototype["_source"]["access"]
  if "context" in prototype["_source"]["fmu"]:
- prototype["_source"]["fmu"]["context"]["stage"] = "iteration"
+ prototype["_source"]["fmu"]["context"]["stage"] = "ensemble"
  pass
  prototype["_source"]["fmu"]["aggregation"] = {
  "id": str(uuid.uuid4()),
@@ -1424,10 +1657,13 @@ class SearchContext:
  raise ex
  blob = BytesIO(res.content)
  res = self._to_sumo(prototype, blob)
+ assert isinstance(res, objects.Child)
  res._blob = blob
  return res

- async def aggregate_async(self, columns=None, operation=None):
+ async def aggregate_async(
+ self, columns=None, operation=None
+ ) -> objects.Child:
  length_hidden = await self.hidden.length_async()
  if length_hidden > 0:
  return await self.hidden._aggregate_async(
@@ -1438,33 +1674,55 @@ class SearchContext:
  columns=columns, operation=operation
  )

- def aggregation(self, column=None, operation=None):
+ def aggregation(self, column=None, operation=None) -> objects.Child:
  assert operation is not None
  assert column is None or isinstance(column, str)
  sc = self.filter(aggregation=operation, column=column)
  numaggs = len(sc)
  assert numaggs <= 1
  if numaggs == 1:
- return sc[0]
- else:
- return self.filter(realization=True).aggregate(
- columns=[column] if column is not None else None,
- operation=operation,
- )
+ agg = sc.single
+ assert isinstance(agg, objects.Child)
+ ts = agg.metadata["_sumo"]["timestamp"]
+ reals = self.filter(
+ realization=True,
+ complex={"range": {"_sumo.timestamp": {"lt": ts}}},
+ ).realizationids
+ if set(reals) == set(
+ agg.metadata["fmu"]["aggregation"]["realization_ids"]
+ ):
+ return agg
+ # ELSE
+ return self.filter(realization=True).aggregate(
+ columns=[column] if column is not None else None,
+ operation=operation,
+ )

- async def aggregation_async(self, column=None, operation=None):
+ async def aggregation_async(
+ self, column=None, operation=None
+ ) -> objects.Child:
  assert operation is not None
  assert column is None or isinstance(column, str)
  sc = self.filter(aggregation=operation, column=column)
  numaggs = await sc.length_async()
  assert numaggs <= 1
  if numaggs == 1:
- return await sc.getitem_async(0)
- else:
- return await self.filter(realization=True).aggregate_async(
- columns=[column] if column is not None else None,
- operation=operation,
- )
+ agg = await sc.single_async
+ assert isinstance(agg, objects.Child)
+ ts = agg.metadata["_sumo"]["timestamp"]
+ reals = await self.filter(
+ realization=True,
+ complex={"range": {"_sumo.timestamp": {"lt": ts}}},
+ ).realizationids_async
+ if set(reals) == set(
+ agg.metadata["fmu"]["aggregation"]["realization_ids"]
+ ):
+ return agg
+ # ELSE
+ return await self.filter(realization=True).aggregate_async(
+ columns=[column] if column is not None else None,
+ operation=operation,
+ )

  @deprecation.deprecated(
  details="Use the method 'aggregate' instead, with parameter 'operation'."
@@ -1648,6 +1906,28 @@ class SearchContext:
  """List of unique object names."""
  return await self.get_field_values_async("data.name.keyword")

+ @property
+ def classes(self) -> List[str]:
+ """List of class names."""
+ return self.get_field_values("class.keyword")
+
+ @property
+ async def classes_async(self) -> List[str]:
+ """List of class names."""
+ return await self.get_field_values_async("class.keyword")
+
+ @property
+ def standard_results(self) -> List[str]:
+ """List of standard result names."""
+ return self.get_field_values("data.standard_result.name.keyword")
+
+ @property
+ async def standard_results_async(self) -> List[str]:
+ """List of standard result names."""
+ return await self.get_field_values_async(
+ "data.standard_result.name.keyword"
+ )
+

  def _gen_filter_doc(spec):
  fmap = {
@@ -1702,7 +1982,7 @@ Examples:
  Match one value::

  surfs = case.surfaces.filter(
- iteration="iter-0",
+ ensemble="iter-0",
  name="my_surface_name"
  )
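Taken together, the changes in this diff move the vocabulary from "iteration" to "ensemble": a new "ensemble" filter key (the "iteration" key is kept but marked for removal), deprecated iterations/iterations_async properties, new ensembles/ensembles_async properties, and get_ensemble_by_uuid getters. A hedged end-to-end sketch follows — the Explorer setup, environment name and case uuid are illustrative placeholders, the example goes through a SearchContext (case.surfaces) since that is where these members are defined in this diff, and the metadata key path follows the fmu.ensemble fields used in the queries above::

    from fmu.sumo.explorer import Explorer

    # Placeholder environment name and case uuid.
    exp = Explorer(env="dev")
    case = exp.get_case_by_uuid("11111111-2222-3333-4444-555555555555")

    # "ensemble" replaces "iteration" as the filter key; the old key still
    # works but is flagged "FIXME: to be removed" in this diff.
    surfs = case.surfaces.filter(ensemble="iter-0", name="my_surface_name")
    print(len(surfs), surfs.classes)

    # Ensembles from the current selection; .iterations is now deprecated.
    for ens in case.surfaces.ensembles:
        print(ens.metadata["fmu"]["ensemble"]["name"])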