fmu-sumo 2.4.3__tar.gz → 2.4.5__tar.gz

This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (66)
  1. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/PKG-INFO +1 -1
  2. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/explorer.rst +10 -10
  3. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/explorer.ipynb +11 -9
  4. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/explorer2.ipynb +32 -34
  5. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/grids-and-properties.ipynb +4 -6
  6. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/table-aggregation.ipynb +12 -14
  7. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/tables.ipynb +6 -6
  8. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/_version.py +2 -2
  9. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/_search_context.py +113 -143
  10. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu_sumo.egg-info/PKG-INFO +1 -1
  11. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.github/pull_request_template.md +0 -0
  12. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.github/workflows/build_docs.yaml +0 -0
  13. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.github/workflows/check_formatting.yml +0 -0
  14. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.github/workflows/publish_release.yaml +0 -0
  15. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.github/workflows/run_tests.yaml +0 -0
  16. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.gitignore +0 -0
  17. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.pre-commit-config.yaml +0 -0
  18. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/.readthedocs.yml +0 -0
  19. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/CONTRIBUTING.md +0 -0
  20. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/LICENSE +0 -0
  21. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/README.md +0 -0
  22. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/SECURITY.md +0 -0
  23. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/_static/equinor-logo.png +0 -0
  24. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/_static/equinor-logo2.jpg +0 -0
  25. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/_static/equinor_logo.jpg +0 -0
  26. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/_static/equinor_logo_only.jpg +0 -0
  27. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/_templates/layout.html +0 -0
  28. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/conf.py +0 -0
  29. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/docs/index.rst +0 -0
  30. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/examples/metrics.ipynb +0 -0
  31. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/pyproject.toml +0 -0
  32. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/setup.cfg +0 -0
  33. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/__init__.py +0 -0
  34. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/__init__.py +0 -0
  35. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/__init__.py +0 -0
  36. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/cache.py +0 -0
  37. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/explorer.py +0 -0
  38. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/filters.py +0 -0
  39. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/__init__.py +0 -0
  40. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/_child.py +0 -0
  41. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/_document.py +0 -0
  42. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/_metrics.py +0 -0
  43. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/case.py +0 -0
  44. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/cases.py +0 -0
  45. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/cpgrid.py +0 -0
  46. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/cpgrid_property.py +0 -0
  47. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/cube.py +0 -0
  48. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/dictionary.py +0 -0
  49. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/ensemble.py +0 -0
  50. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/ensembles.py +0 -0
  51. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/iteration.py +0 -0
  52. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/iterations.py +0 -0
  53. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/polygons.py +0 -0
  54. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/realization.py +0 -0
  55. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/realizations.py +0 -0
  56. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/surface.py +0 -0
  57. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/objects/table.py +0 -0
  58. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu/sumo/explorer/timefilter.py +0 -0
  59. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu_sumo.egg-info/SOURCES.txt +0 -0
  60. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu_sumo.egg-info/dependency_links.txt +0 -0
  61. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu_sumo.egg-info/requires.txt +0 -0
  62. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/src/fmu_sumo.egg-info/top_level.txt +0 -0
  63. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/tests/conftest.py +0 -0
  64. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/tests/context.py +0 -0
  65. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/tests/test_explorer.py +0 -0
  66. {fmu_sumo-2.4.3 → fmu_sumo-2.4.5}/tests/test_objects_table.py +0 -0
--- fmu_sumo-2.4.3/PKG-INFO
+++ fmu_sumo-2.4.5/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmu-sumo
-Version: 2.4.3
+Version: 2.4.5
 Summary: Python package for interacting with Sumo in an FMU setting
 Author: Equinor
 License: Apache License
--- fmu_sumo-2.4.3/docs/explorer.rst
+++ fmu_sumo-2.4.5/docs/explorer.rst
@@ -79,8 +79,8 @@ as a base class for certain other classes in `fmu.sumo.explorer`:
 * `Case` objects are search contexts that match objects in a specific
   case.
 
-* `Iteration` objects are search contexts that match objects in a
-  specific iteration.
+* `Ensemble` objects are search contexts that match objects in a
+  specific ensemble. (Previously `Iteration`)
 
 * `Realization` objects are search contexts that match objects in a
   specific realization.
@@ -309,7 +309,7 @@ method can be used to further refine the set of matching objects:
 
   case = sumo.get_case_by_uuid("1234567")
 
-  surfaces = case.surfaces.filter(iteration="iter-0")
+  surfaces = case.surfaces.filter(ensemble="iter-0")
 
   contents = surfaces.contents
 
@@ -341,7 +341,7 @@ are useful parameters to `.filter()`:
 * `tagname`
 * `content`
 * `dataformat`
-* `iteration`
+* `ensemble`
 * `realization`
 * `aggregation`
 * `stage`
@@ -379,7 +379,7 @@ We can get list of filter values for the following properties:
 * `contents`
 * `tagnames`
 * `dataformats`
-* `iterations`
+* `ensemble`
 * `realizations`
 * `aggregations`
 * `stages`
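The rename is mechanical on the caller's side: every `iteration=` keyword becomes `ensemble=`, and the discovery property moves from `.iterations` to `.ensembles`. A minimal before/after sketch, reusing the placeholder case UUID and the Drogon-style ensemble name `iter-0` from the docs above:

    from fmu.sumo.explorer import Explorer

    sumo = Explorer(env="dev")
    case = sumo.get_case_by_uuid("1234567")  # placeholder UUID from the docs

    # 2.4.3 and earlier:
    # surfaces = case.surfaces.filter(iteration="iter-0")

    # 2.4.5:
    surfaces = case.surfaces.filter(ensemble="iter-0")
    print(surfaces.ensembles)  # discovery property, previously .iterations
    print(surfaces.contents)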
@@ -467,7 +467,7 @@ surfaces:
 case = explorer.get_case_by_uuid("dec73fae-bb11-41f2-be37-73ba005c4967")
 
 surface_collection: SurfaceCollection = case.surfaces.filter(
-    iteration="iter-1",
+    ensemble="iter-1",
 )
 
 
@@ -490,7 +490,7 @@ This can now be reduced to:
 case = explorer.get_case_by_uuid("dec73fae-bb11-41f2-be37-73ba005c4967")
 
 surface_collection: SurfaceCollection = case.surfaces.filter(
-    iteration="iter-1",
+    ensemble="iter-1",
 )
 
 async def main():
@@ -593,7 +593,7 @@ this is currently implemented for `surfaces` and `tables`.
 surfaces = case.surfaces.filter(
     stage="realization",
     content="depth",
-    iteration="iter-0",
+    ensemble="iter-0",
     name="Valysar Fm.",
     tagname="FACIES_Fraction_Channel"
     stratigraphic="false"
@@ -610,7 +610,7 @@ this is currently implemented for `surfaces` and `tables`.
 
 In this example we perform aggregations on all realized instance of
 the surface `Valysar Fm. (FACIES_Fraction_Channel)` in
-iteration 0. The aggregation methods return `xtgeo.RegularSurface`
+ensemble 0. The aggregation methods return `xtgeo.RegularSurface`
 objects.
 
 .. note:: The methods `.mean()`, `.min()`, etc are deprecated; the
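The note marks the per-statistic convenience methods as deprecated in favour of a single `aggregate()` entry point, as the updated notebook cell later in this diff confirms. A minimal sketch of the replacement call, built on the filter from the docs example above:

    from fmu.sumo.explorer import Explorer

    sumo = Explorer(env="dev")
    case = sumo.get_case_by_uuid("dec73fae-bb11-41f2-be37-73ba005c4967")  # UUID from the docs
    surfaces = case.surfaces.filter(
        stage="realization", content="depth", ensemble="iter-0",
        name="Valysar Fm.", tagname="FACIES_Fraction_Channel",
    )

    # Deprecated: surfaces.min(), .max(), .mean(), .std(), .p10(), .p50(), .p90()
    regsurf = surfaces.aggregate(operation="mean")
    regsurf.to_regular_surface().quickplot()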
@@ -625,7 +625,7 @@ For `table` aggregation it is also necessary to specify the columns you want:
 
 sumo = Explorer(env="dev")
 case = sumo.get_case_by_uuid("5b558daf-61c5-400a-9aa2-c602bb471a16")
-tables = case.tables.filter(iteration="iter-0", realization=True,
+tables = case.tables.filter(ensemble="iter-0", realization=True,
     tagname=summary, column="FOPT")
 agg = tables.aggregate(operation="collection", columns=["FOPT"])
 agg.to_pandas()
--- fmu_sumo-2.4.3/examples/explorer.ipynb
+++ fmu_sumo-2.4.5/examples/explorer.ipynb
@@ -106,11 +106,11 @@
     "# Get case surfaces\n",
     "surfs = case.surfaces.filter(realization=True)\n",
     "\n",
-    "# Get available iterations\n",
-    "print(\"Iterations:\", surfs.iterations)\n",
+    "# Get available ensembles\n",
+    "print(\"Ensembles:\", surfs.ensembles)\n",
     "\n",
-    "# Filter on iteration\n",
-    "surfs = surfs.filter(iteration=\"iter-0\")\n",
+    "# Filter on ensemble\n",
+    "surfs = surfs.filter(ensemble=\"iter-0\")\n",
     "\n",
     "# Get available contents\n",
     "print(\"Contents:\", surfs.contents)\n",
@@ -151,7 +151,7 @@
     "print(\"Content:\", surf.content)\n",
     "print(\"Tagname:\", surf.tagname)\n",
     "print(\"Format:\", surf.dataformat)\n",
-    "print(\"Iteration:\", surf.iteration)\n",
+    "print(\"Ensemble:\", surf.ensemble)\n",
     "print(\"Realization:\", surf.realization)\n",
     "print(\"vertical domain:\", surf.vertical_domain)\n",
     "\n",
@@ -179,9 +179,9 @@
     "\n",
     "cubes = seismic_case.cubes\n",
     "\n",
-    "print(\"Iterations\\t\", cubes.iterations)\n",
+    "print(\"Ensembles\\t\", cubes.ensembles)\n",
     "\n",
-    "cubes.filter(iteration=\"iter-0\")\n",
+    "cubes.filter(ensemble=\"iter-0\")\n",
     "\n",
     "print(\"Names:\\t\\t\", cubes.names)\n",
     "\n",
@@ -200,7 +200,7 @@
     "print(\"ID:\", cube.uuid)\n",
     "print(\"Name:\", cube.name)\n",
     "print(\"Tagname:\", cube.tagname)\n",
-    "print(\"Iteration:\", cube.iteration)\n",
+    "print(\"Ensemble:\", cube.ensemble)\n",
     "print(\"Realization:\", cube.realization)\n",
     "print(\"is_observation:\", cube[\"data\"][\"is_observation\"])\n",
     "print(\"is_prediction:\", cube[\"data\"][\"is_prediction\"])\n",
@@ -232,7 +232,9 @@
    "source": [
     "# Perform aggregation on SurfaceCollection\n",
     "\n",
-    "regsurf = surfs.min()  # .max(), .mean(), .std(), .p10(), .p90(), .p50()\n",
+    "regsurf = surfs.aggregate(\n",
+    "    operation=\"min\"\n",
+    ")  # operations: max, mean, std, p10, p90, p50\n",
     "regsurf.to_regular_surface().quickplot()"
    ]
   },
--- fmu_sumo-2.4.3/examples/explorer2.ipynb
+++ fmu_sumo-2.4.5/examples/explorer2.ipynb
@@ -21,10 +21,10 @@
    "outputs": [],
    "source": [
     "env = \"dev\"\n",
-    "caseuuid = \"d872b3ce-0322-4357-b192-32bde70d7dac\"\n",
+    "caseuuid = \"1abbf342-8460-45d7-afa6-814eac9dc9d3\"\n",
     "name = \"DROGON\"\n",
     "tagname = \"summary\"\n",
-    "iteration = \"iter-0\""
+    "ensemble = \"iter-0\""
    ]
   },
  {
@@ -35,39 +35,38 @@
    "outputs": [],
    "source": [
     "exp = Explorer(env=env)\n",
-    "len(exp.cases)"
+    "exp.cases"
    ]
   },
  {
    "cell_type": "code",
    "execution_count": null,
-   "id": "c5ecded7-bdaf-4c7e-814f-6cec002ef895",
+   "id": "9a4fa479-ed06-406f-8cdf-e3392c7453ea",
    "metadata": {},
    "outputs": [],
    "source": [
-    "exp.cases"
+    "[(case.name, case.uuid) for case in exp.cases]"
    ]
   },
  {
    "cell_type": "code",
    "execution_count": null,
-   "id": "9a4fa479-ed06-406f-8cdf-e3392c7453ea",
+   "id": "b4c047ae-4b00-48c8-a148-e425aea81928",
    "metadata": {},
    "outputs": [],
    "source": [
-    "[(case.name, case.uuid) for case in exp.cases]"
+    "case = exp.get_object(caseuuid)\n",
+    "case"
    ]
   },
  {
    "cell_type": "code",
    "execution_count": null,
-   "id": "b4c047ae-4b00-48c8-a148-e425aea81928",
+   "id": "cb418b43",
    "metadata": {},
    "outputs": [],
    "source": [
-    "case = exp.get_object(caseuuid)\n",
-    "print(f\"Case name: {case.name}\")\n",
-    "print(f\"Case UUID: {case.uuid}\")"
+    "case.overview"
    ]
   },
  {
@@ -81,7 +80,7 @@
     "    cls=\"table\",\n",
     "    name=name,\n",
     "    tagname=tagname,\n",
-    "    iteration=iteration,\n",
+    "    ensemble=ensemble,\n",
     "    realization=True,\n",
     ")\n",
     "print(f\"Number of realizations: {len(realizations)}\")\n",
@@ -153,9 +152,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "realizations = case.filter(\n",
-    "    cls=\"surface\", iteration=iteration, realization=True\n",
-    ")\n",
+    "realizations = case.filter(cls=\"surface\", ensemble=ensemble, realization=True)\n",
     "print(f\"Number of realizations: {len(realizations)}\")\n",
     "print(realizations.names)\n",
     "print(realizations.tagnames)\n",
@@ -170,7 +167,7 @@
    "outputs": [],
    "source": [
     "surfaces = realizations.filter(\n",
-    "    name=\"Valysar Fm.\", content=\"depth\", tagname=\"apstrend_aps_Channel_Average\"\n",
+    "    name=\"Valysar Fm.\", content=\"depth\", tagname=\"probcube_aps_Channel_Average\"\n",
     ")\n",
     "print(len(surfaces))\n",
     "print(surfaces.tagnames)\n",
@@ -259,7 +256,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(case.iterations)"
+    "len(case.ensembles)\n",
+    "print(case.ensembles)"
    ]
   },
  {
@@ -269,7 +267,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "iteration = exp.get_iteration_by_uuid(\"23c63921-b54c-449d-49a3-c08332faf7cc\")"
+    "ensemble = exp.get_ensemble_by_uuid(\"4e2f012d-15a4-fd57-2368-a349ff2e56a0\")"
    ]
   },
  {
@@ -279,7 +277,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration)"
+    "len(ensemble)"
    ]
   },
  {
@@ -289,7 +287,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "iteration._get_field_values(\"class.keyword\")"
+    "ensemble.get_field_values(\"class.keyword\")"
    ]
   },
  {
@@ -299,7 +297,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.cases)"
+    "ensemble.cases"
    ]
   },
  {
@@ -309,7 +307,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.realizations)"
+    "ensemble.realizations"
    ]
   },
  {
@@ -319,7 +317,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.filter(realization=13))"
+    "ensemble.filter(realization=13)"
    ]
   },
  {
@@ -329,7 +327,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.filter(realization=159))"
+    "ensemble.filter(realization=79)"
    ]
   },
  {
@@ -339,7 +337,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.filter(realization=160))"
+    "ensemble.filter(realization=100)"
    ]
   },
  {
@@ -349,7 +347,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "iteration.aggregations"
+    "ensemble.aggregations"
    ]
   },
  {
@@ -359,7 +357,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.filter(aggregation=True))"
+    "ensemble.filter(aggregation=True)"
    ]
   },
  {
@@ -369,7 +367,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(iteration.filter(realization=True))"
+    "ensemble.filter(realization=True)"
    ]
   },
  {
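Taken together, these cells show `Ensemble` replacing `Iteration` as a first-class search context: `get_ensemble_by_uuid` supersedes `get_iteration_by_uuid`, and the previously private `_get_field_values` is now public as `get_field_values`. A consolidated sketch, assuming a valid ensemble UUID in the dev environment:

    from fmu.sumo.explorer import Explorer

    exp = Explorer(env="dev")
    # Placeholder UUID; pick a real one from case.ensembles in practice
    ensemble = exp.get_ensemble_by_uuid("4e2f012d-15a4-fd57-2368-a349ff2e56a0")

    print(len(ensemble))                               # number of objects in the ensemble
    print(ensemble.get_field_values("class.keyword"))  # was ensemble._get_field_values(...)
    reals = ensemble.filter(realization=True)          # per-realization objects only
    aggs = ensemble.filter(aggregation=True)           # aggregated objects only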
@@ -399,9 +397,9 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from fmu.sumo.explorer import filters\n",
+    "from fmu.sumo.explorer.filters import Filters\n",
     "\n",
-    "len(exp.cases.filter(has=filters.seismic4d))"
+    "exp.cases.filter(has=Filters.seismic4d)"
    ]
   },
  {
@@ -411,7 +409,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(exp.filter(has=filters.seismic4d))"
+    "exp.filter(has=Filters.seismic4d)"
    ]
   },
  {
@@ -421,7 +419,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "[case.name for case in list(exp.filter(has=filters.seismic4d).cases)[:10]]"
+    "[case.name for case in list(exp.filter(has=Filters.seismic4d).cases)[:10]]"
    ]
   },
  {
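These three cells capture the second API move in this release: the predefined filters migrate from module-level attributes to a `Filters` class, so only the import changes while the attribute lookup stays the same. A sketch of the new form:

    from fmu.sumo.explorer import Explorer
    from fmu.sumo.explorer.filters import Filters  # was: from fmu.sumo.explorer import filters

    exp = Explorer(env="dev")
    # Cases that contain 4D seismic data
    seismic_cases = exp.cases.filter(has=Filters.seismic4d)
    print([case.name for case in list(seismic_cases)[:10]])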
@@ -431,7 +429,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "myrealization = iteration.realizations.filter(\n",
+    "myrealization = ensemble.realizations.filter(\n",
     "    complex={\"term\": {\"fmu.realization.id\": 0}}\n",
     ")[0]\n",
     "myrealization"
@@ -454,7 +452,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "print(json.dumps(iteration.metadata, indent=2))"
+    "print(json.dumps(ensemble.metadata, indent=2))"
    ]
   },
  {
--- fmu_sumo-2.4.3/examples/grids-and-properties.ipynb
+++ fmu_sumo-2.4.5/examples/grids-and-properties.ipynb
@@ -105,7 +105,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "grids[0].iteration, grids[0].realization"
+    "grids[0].ensemble, grids[0].realization"
    ]
   },
  {
@@ -115,7 +115,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "{g.iteration for g in grids}"
+    "{g.ensemble for g in grids}"
    ]
   },
  {
@@ -167,9 +167,7 @@
    "outputs": [],
    "source": [
     "with Timer():\n",
-    "    print(\n",
-    "        set(grprops._get_field_values(\"data.geometry.relative_path.keyword\"))\n",
-    "    )"
+    "    print(set(grprops.get_field_values(\"data.geometry.relative_path.keyword\")))"
    ]
   },
  {
@@ -220,7 +218,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "len(gridproperties)"
+    "gridproperties"
    ]
   },
  {
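The same `_get_field_values` to `get_field_values` rename appears here on grid and grid-property collections, alongside the `.iteration` to `.ensemble` attribute rename. A sketch on a grid collection, where `case.grids` is an assumption made by analogy with `case.surfaces`:

    from fmu.sumo.explorer import Explorer

    exp = Explorer(env="dev")
    case = exp.get_case_by_uuid("dec73fae-bb11-41f2-be37-73ba005c4967")  # UUID reused from the docs

    grids = case.grids.filter(realization=True)  # assumed collection property
    print({g.ensemble for g in grids})           # the .iteration attribute is gone

    # Public replacement for the old private helper:
    print(set(grids.get_field_values("data.geometry.relative_path.keyword")))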
--- fmu_sumo-2.4.3/examples/table-aggregation.ipynb
+++ fmu_sumo-2.4.5/examples/table-aggregation.ipynb
@@ -80,20 +80,26 @@
     "    print(agg.to_pandas().sort_values(by=[\"REAL\", \"DATE\"]))\n",
     "\n",
     "\n",
-    "def run_exp(caseuuid, itername, tagname, columns):\n",
+    "def run_exp(caseuuid, ensemblename, tagname, columns):\n",
     "    case = exp.get_case_by_uuid(caseuuid)\n",
     "    print(f\"{case.asset}; {case.name}; {caseuuid}; {case.status}\")\n",
     "    rels = case.tables.visible.filter(\n",
-    "        iteration=itername, realization=True, tagname=tagname, column=columns\n",
+    "        ensemble=ensemblename,\n",
+    "        realization=True,\n",
+    "        tagname=tagname,\n",
+    "        column=columns,\n",
     "    )\n",
     "    do_aggregate(\"Full-sized tables\", tagname, rels, columns)\n",
     "    rels = case.tables.hidden.filter(\n",
-    "        iteration=itername, realization=True, tagname=tagname, column=columns\n",
+    "        ensemble=ensemblename,\n",
+    "        realization=True,\n",
+    "        tagname=tagname,\n",
+    "        column=columns,\n",
     "    )\n",
     "    do_aggregate(\"Split tables\", tagname, rels, columns)\n",
     "    with Timer(\"Fetch single-vector table\"):\n",
     "        aggs = case.tables.filter(\n",
-    "            iteration=itername,\n",
+    "            ensemble=ensemblename,\n",
     "            aggregation=True,\n",
     "            tagname=tagname,\n",
     "            column=columns[0],\n",
@@ -157,7 +163,7 @@
    "outputs": [],
    "source": [
     "caseuuid = \"8ffeb5f8-ca60-42ee-998e-53d34e47d3e2\"\n",
-    "iteration = \"iter-0\"\n",
+    "ensemble = \"iter-0\"\n",
     "case = exp.get_case_by_uuid(caseuuid)\n",
     "hidden = case.tables.hidden\n",
     "cols_f0 = hidden.filter(complex={\"term\": {\"_sumo.fragment\": 0}}).columns\n",
@@ -200,16 +206,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "run_exp(caseuuid, iteration, \"summary\", some_f0)"
+    "run_exp(caseuuid, ensemble, \"summary\", some_f0)"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "379ec147-876e-444b-b62c-1597459905d5",
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
 ],
 "metadata": {
--- fmu_sumo-2.4.3/examples/tables.ipynb
+++ fmu_sumo-2.4.5/examples/tables.ipynb
@@ -122,9 +122,9 @@
    "metadata": {},
    "source": [
     "Most tables are stored during the running of an fmu run. So you will in that case have one version per realization (i.e per file path realization-n/iter-m) <br>\n",
-    "This means that it will be a slow process to get to results from one ensemble, meaning one iteration. <br>\n",
+    "This means that it will be a slow process to get to results from one ensemble. <br>\n",
     "Below is an example of how to fetch one table. General syntax: <br>\n",
-    "``table_collection = case.tables.filter(realization=<realization number>, iteration=<iteration name>,``<br>\n",
+    "``table_collection = case.tables.filter(realization=<realization number>, ensemble=<ensemble name>,``<br>\n",
     "&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;&emsp;``name=<table name>, tagname=<name of tag>)``<br>\n",
     "Any of the input arguements are optional, so you could end up with more than one resulting table, and in theory even if you have <br>\n",
     "used all arguements you could end up with several (long story, please ask). But you can check how many tables you have with using <br>\n",
@@ -144,7 +144,7 @@
    "source": [
     "# Filter using the key\n",
     "one_table = tables.filter(\n",
-    "    realization=0, iteration=\"iter-0\", name=\"DROGON\", tagname=\"compdat\"\n",
+    "    realization=0, ensemble=\"iter-0\", name=\"DROGON\", tagname=\"compdat\"\n",
     ")[0]\n",
     "\n",
     "# Give back the name and tag\n",
@@ -207,7 +207,7 @@
    "outputs": [],
    "source": [
     "sim_tables = tables.filter(\n",
-    "    name=\"DROGON\", iteration=\"iter-0\", aggregation=\"collection\"\n",
+    "    name=\"DROGON\", ensemble=\"iter-0\", aggregation=\"collection\"\n",
     ")\n",
     "sim_tables.tagnames"
    ]
@@ -325,7 +325,7 @@
    "\n",
    "Here ``<DATATYPE>`` can be of collection, index, mean, min, max, p10 or p90\n",
    "\n",
-   "This class gives you the aggregated tables that share name and tagname for one iteration as one object, <br> \n",
+   "This class gives you the aggregated tables that share name and tagname for one ensemble as one object, <br> \n",
    "so you don't need to know that what you are dealing with is a collection of objects\n"
    ]
   },
@@ -463,7 +463,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "COMPDAT = AggregatedTable(case, \"DROGON\", \"compdat\", iteration=\"iter-0\")\n",
+    "COMPDAT = AggregatedTable(case, \"DROGON\", \"compdat\", ensemble=\"iter-0\")\n",
     "COMPDAT[\"KH\"].to_pandas()"
    ]
   },
--- fmu_sumo-2.4.3/src/fmu/sumo/explorer/_version.py
+++ fmu_sumo-2.4.5/src/fmu/sumo/explorer/_version.py
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '2.4.3'
-__version_tuple__ = version_tuple = (2, 4, 3)
+__version__ = version = '2.4.5'
+__version_tuple__ = version_tuple = (2, 4, 5)
--- fmu_sumo-2.4.3/src/fmu/sumo/explorer/objects/_search_context.py
+++ fmu_sumo-2.4.5/src/fmu/sumo/explorer/objects/_search_context.py
@@ -1,10 +1,8 @@
 from __future__ import annotations
 
 import json
-import uuid
 import warnings
 from datetime import datetime
-from io import BytesIO
 from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Union
 
 import deprecation
@@ -1164,6 +1162,46 @@ class SearchContext:
     def grid_properties(self) -> SearchContext:
         return self._context_for_class("cpgrid_property")
 
+    @property
+    def parameters(self) -> SearchContext:
+        return self.filter(
+            complex={
+                "bool": {
+                    "must": [
+                        {"term": {"data.name.keyword": "parameters"}},
+                        {"term": {"data.content.keyword": "parameters"}},
+                    ],
+                    "should": [
+                        {
+                            "bool": {
+                                "must": [
+                                    {"term": {"class.keyword": "dictionary"}},
+                                    {
+                                        "exists": {
+                                            "field": "fmu.realization.id"
+                                        }
+                                    },
+                                ]
+                            }
+                        },
+                        {
+                            "bool": {
+                                "must": [
+                                    {"term": {"class.keyword": "table"}},
+                                    {
+                                        "exists": {
+                                            "field": "fmu.aggregation.operation"
+                                        }
+                                    },
+                                ]
+                            }
+                        },
+                    ],
+                    "minimum_should_match": 1,
+                }
+            }
+        )
+
     def _get_object_by_class_and_uuid(self, cls, uuid) -> Any:
         obj = self.get_object(uuid)
         if obj.metadata["class"] != cls:
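The new `parameters` property narrows a search context to parameter data purely by query: objects whose `data.name` and `data.content` are both `parameters`, kept only if they are per-realization dictionaries or aggregated tables. A usage sketch; the property itself is confirmed by the diff, while calling it on a `Case` (itself a search context, per the docs) is an assumption:

    from fmu.sumo.explorer import Explorer

    exp = Explorer(env="dev")
    case = exp.get_case_by_uuid("dec73fae-bb11-41f2-be37-73ba005c4967")  # UUID reused from the docs

    # Per-realization parameter dictionaries plus aggregated parameter tables
    params = case.parameters
    print(len(params))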
@@ -1484,12 +1522,10 @@
         """
         return await self._get_object_by_class_and_uuid_async("table", uuid)
 
-    def _verify_aggregation_operation(
-        self,
-    ) -> Tuple[Dict, List[str], List[int]]:
-        query = {
+    def __prepare_verify_aggregation_query(self) -> Dict:
+        return {
             "query": self._query,
-            "size": 1,
+            "size": 0,
             "track_total_hits": True,
             "aggs": {
                 k: {"terms": {"field": k + ".keyword", "size": 1}}
@@ -1497,85 +1533,83 @@ class SearchContext:
                 for k in [
                     "fmu.case.uuid",
                     "class",
                     "fmu.ensemble.name",
+                    "fmu.entity.uuid",
                     "data.name",
                     "data.tagname",
                     "data.content",
                 ]
             },
         }
-        sres = self._sumo.post("/search", json=query).json()
-        if len(sres["hits"]["hits"]) == 0:
+
+    def __verify_aggregation_operation(
+        self, sres
+    ) -> Tuple[str, str, str, str]:
+        tot_hits = sres["hits"]["total"]["value"]
+        if tot_hits == 0:
             raise Exception("No matching realizations found.")
-        prototype = sres["hits"]["hits"][0]
         conflicts = [
             k
             for (k, v) in sres["aggregations"].items()
             if (
-                ("sum_other_doc_count" in v) and (v["sum_other_doc_count"] > 0)
+                ("sum_other_doc_count" in v)
+                and (v["sum_other_doc_count"] > 0)
+                or v["buckets"][0]["doc_count"] != tot_hits
             )
         ]
         if len(conflicts) > 0:
             raise Exception(f"Conflicting values for {conflicts}")
+        entityuuid = sres["aggregations"]["fmu.entity.uuid"]["buckets"][0][
+            "key"
+        ]
+        caseuuid = sres["aggregations"]["fmu.case.uuid"]["buckets"][0]["key"]
+        ensemblename = sres["aggregations"]["fmu.ensemble.name"]["buckets"][0][
+            "key"
+        ]
+        classname = sres["aggregations"]["class"]["buckets"][0]["key"]
+        return caseuuid, classname, entityuuid, ensemblename
 
-        hits = self._search_all(select=["fmu.realization.id"])
-
-        if any(
-            hit["_source"]["fmu"].get("realization") is None for hit in hits
-        ):
-            raise Exception("Selection contains non-realization data.")
-
-        uuids = [hit["_id"] for hit in hits]
-        rids = [hit["_source"]["fmu"]["realization"]["id"] for hit in hits]
-        return prototype, uuids, rids
+    def _verify_aggregation_operation(
+        self, columns, operation
+    ) -> Tuple[str, str, str, str]:
+        assert columns is None or len(columns) == 1, (
+            "Exactly one column required for collection aggregation."
+        )
+        sc = self if columns is None else self.filter(column=columns)
+        query = sc.__prepare_verify_aggregation_query()
+        sres = sc._sumo.post("/search", json=query).json()
+        return sc.__verify_aggregation_operation(sres)
 
-    def _aggregate(self, columns=None, operation=None) -> objects.Child:
-        assert (
-            operation != "collection"
-            or columns is not None
-            and len(columns) == 1
-        ), "Exactly one column required for collection aggregation."
-        prototype, uuids, rids = self.filter(
-            column=columns
-        )._verify_aggregation_operation()
+    def __prepare_aggregation_spec(
+        self, caseuuid, classname, entityuuid, ensemblename, operation, columns
+    ):
         spec = {
-            "object_ids": uuids,
+            "case_uuid": caseuuid,
+            "class": classname,
+            "entity_uuid": entityuuid,
+            "ensemble_name": ensemblename,
+            "iteration_name": ensemblename,
             "operations": [operation],
         }
-        del prototype["_source"]["fmu"]["realization"]
-        del prototype["_source"]["_sumo"]
-        del prototype["_source"]["file"]
-        if "context" in prototype["_source"]["fmu"]:
-            prototype["_source"]["fmu"]["context"]["stage"] = "ensemble"
-            pass
-        prototype["_source"]["fmu"]["aggregation"] = {
-            "id": str(uuid.uuid4()),
-            "realization_ids": rids,
-            "operation": operation,
-        }
         if columns is not None:
             spec["columns"] = columns
-            cols = columns[:]
-            table_index = prototype["_source"]["data"].get("table_index")
-            if (
-                table_index is not None
-                and len(table_index) != 0
-                and table_index[0] not in cols
-            ):
-                cols.insert(0, table_index[0])
-                pass
-            prototype["_source"]["data"]["spec"]["columns"] = cols
-            pass
+        return spec
+
+    def _aggregate(self, columns=None, operation=None) -> objects.Child:
+        caseuuid, classname, entityuuid, ensemblename = (
+            self._verify_aggregation_operation(columns, operation)
+        )
+        spec = self.__prepare_aggregation_spec(
+            caseuuid, classname, entityuuid, ensemblename, operation, columns
+        )
+        spec["object_ids"] = self.uuids
        try:
             res = self._sumo.post("/aggregations", json=spec)
         except httpx.HTTPStatusError as ex:
             print(ex.response.reason_phrase)
             print(ex.response.text)
             raise ex
-        blob = BytesIO(res.content)
-        res = self._to_sumo(prototype, blob)
-        assert isinstance(res, objects.Child)
-        res._blob = blob
-        return res
+        res = self._sumo.poll(res).json()
+        return self._to_sumo(res)
 
     def aggregate(self, columns=None, operation=None) -> objects.Child:
         if len(self.hidden) > 0:
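The substance of this refactor is in what the client sends to `/aggregations`: instead of shipping a mutated prototype metadata document plus a list of realization ids, it posts a declarative spec keyed on case, class, entity, and ensemble (with `iteration_name` kept as a legacy alias) and then polls until the server-side job finishes. A sketch of the request body the new `_aggregate` assembles; all values below are placeholders:

    # Shape of the spec built by __prepare_aggregation_spec / _aggregate
    spec = {
        "case_uuid": "dec73fae-bb11-41f2-be37-73ba005c4967",
        "class": "table",
        "entity_uuid": "00000000-0000-0000-0000-000000000000",
        "ensemble_name": "iter-0",
        "iteration_name": "iter-0",   # legacy alias for ensemble_name
        "operations": ["collection"],
        "columns": ["FOPT"],          # only set for table aggregation
        "object_ids": ["..."],        # uuids of the matching realization objects
    }
    # res = self._sumo.post("/aggregations", json=spec)
    # res = self._sumo.poll(res).json()  # long-running job; poll until done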
@@ -1586,103 +1620,39 @@ class SearchContext:
         )
 
     async def _verify_aggregation_operation_async(
-        self,
-    ) -> Tuple[Dict, List[str], List[int]]:
-        query = {
-            "query": self._query,
-            "size": 1,
-            "track_total_hits": True,
-            "aggs": {
-                k: {"terms": {"field": k + ".keyword", "size": 1}}
-                for k in [
-                    "fmu.case.uuid",
-                    "class",
-                    "fmu.ensemble.name",
-                    "data.name",
-                    "data.tagname",
-                    "data.content",
-                ]
-            },
-        }
-        sres = (await self._sumo.post_async("/search", json=query)).json()
-        if len(sres["hits"]["hits"]) == 0:
-            raise Exception("No matching realizations found.")
-        prototype = sres["hits"]["hits"][0]
-        conflicts = [
-            k
-            for (k, v) in sres["aggregations"].items()
-            if (
-                ("sum_other_doc_count" in v) and (v["sum_other_doc_count"] > 0)
-            )
-        ]
-        if len(conflicts) > 0:
-            raise Exception(f"Conflicting values for {conflicts}")
-
-        hits = await self._search_all_async(select=["fmu.realization.id"])
-
-        if any(
-            hit["_source"]["fmu"].get("realization") is None for hit in hits
-        ):
-            raise Exception("Selection contains non-realization data.")
-
-        uuids = [hit["_id"] for hit in hits]
-        rids = [hit["_source"]["fmu"]["realization"]["id"] for hit in hits]
-        return prototype, uuids, rids
-
-    async def _aggregate_async(
-        self, columns=None, operation=None
-    ) -> objects.Child:
+        self, columns, operation
+    ) -> Tuple[str, str, str, str]:
         assert (
             operation != "collection"
             or columns is not None
             and len(columns) == 1
         ), "Exactly one column required for collection aggregation."
+        sc = self if columns is None else self.filter(column=columns)
+        query = sc.__prepare_verify_aggregation_query()
+        sres = (await self._sumo.post_async("/search", json=query)).json()
+        return sc.__verify_aggregation_operation(sres)
+
+    async def _aggregate_async(
+        self, columns=None, operation=None
+    ) -> objects.Child:
         (
-            prototype,
-            uuids,
-            rids,
-        ) = await self.filter(
-            column=columns
-        )._verify_aggregation_operation_async()
-        spec = {
-            "object_ids": uuids,
-            "operations": [operation],
-        }
-        del prototype["_source"]["fmu"]["realization"]
-        del prototype["_source"]["_sumo"]
-        del prototype["_source"]["file"]
-        if "context" in prototype["_source"]["fmu"]:
-            prototype["_source"]["fmu"]["context"]["stage"] = "ensemble"
-            pass
-        prototype["_source"]["fmu"]["aggregation"] = {
-            "id": str(uuid.uuid4()),
-            "realization_ids": rids,
-            "operation": operation,
-        }
-        if columns is not None:
-            spec["columns"] = columns
-            cols = columns[:]
-            table_index = prototype["_source"]["data"].get("table_index")
-            if (
-                table_index is not None
-                and len(table_index) != 0
-                and table_index[0] not in cols
-            ):
-                cols.insert(0, table_index[0])
-                pass
-            prototype["_source"]["data"]["spec"]["columns"] = cols
-            pass
+            caseuuid,
+            classname,
+            entityuuid,
+            ensemblename,
+        ) = await self._verify_aggregation_operation_async(columns, operation)
+        spec = self.__prepare_aggregation_spec(
+            caseuuid, classname, entityuuid, ensemblename, operation, columns
+        )
+        spec["object_ids"] = await self.uuids_async
         try:
             res = await self._sumo.post_async("/aggregations", json=spec)
         except httpx.HTTPStatusError as ex:
             print(ex.response.reason_phrase)
             print(ex.response.text)
             raise ex
-        blob = BytesIO(res.content)
-        res = self._to_sumo(prototype, blob)
-        assert isinstance(res, objects.Child)
-        res._blob = blob
-        return res
+        res = self._sumo.poll(res).json()
+        return self._to_sumo(res)
 
     async def aggregate_async(
         self, columns=None, operation=None
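The async path mirrors the sync one, and from user code the entry point is `aggregate_async`. A minimal sketch, reusing the case UUID and filter values from the table-aggregation example in the docs above:

    import asyncio

    from fmu.sumo.explorer import Explorer


    async def main():
        exp = Explorer(env="dev")
        case = exp.get_case_by_uuid("5b558daf-61c5-400a-9aa2-c602bb471a16")  # UUID from the docs
        tables = case.tables.filter(
            ensemble="iter-0", realization=True, tagname="summary", column="FOPT"
        )
        # Runs the server-side aggregation and polls until it completes
        agg = await tables.aggregate_async(operation="collection", columns=["FOPT"])
        print(agg.to_pandas())


    asyncio.run(main())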
--- fmu_sumo-2.4.3/src/fmu_sumo.egg-info/PKG-INFO
+++ fmu_sumo-2.4.5/src/fmu_sumo.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fmu-sumo
-Version: 2.4.3
+Version: 2.4.5
 Summary: Python package for interacting with Sumo in an FMU setting
 Author: Equinor
 License: Apache License
The remaining files (marked +0 -0 in the list above) are unchanged between the two versions.