fmu-sumo 2.5.0__tar.gz → 2.6.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {fmu_sumo-2.5.0/src/fmu_sumo.egg-info → fmu_sumo-2.6.1}/PKG-INFO +1 -1
  2. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/_version.py +3 -3
  3. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/_metrics.py +53 -0
  4. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/_search_context.py +185 -29
  5. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/polygons.py +10 -2
  6. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1/src/fmu_sumo.egg-info}/PKG-INFO +1 -1
  7. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.github/pull_request_template.md +0 -0
  8. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.github/workflows/build_docs.yaml +0 -0
  9. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.github/workflows/check_formatting.yml +0 -0
  10. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.github/workflows/publish_release.yaml +0 -0
  11. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.github/workflows/run_tests.yaml +0 -0
  12. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.gitignore +0 -0
  13. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.pre-commit-config.yaml +0 -0
  14. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/.readthedocs.yml +0 -0
  15. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/CONTRIBUTING.md +0 -0
  16. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/LICENSE +0 -0
  17. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/README.md +0 -0
  18. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/SECURITY.md +0 -0
  19. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/_static/equinor-logo.png +0 -0
  20. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/_static/equinor-logo2.jpg +0 -0
  21. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/_static/equinor_logo.jpg +0 -0
  22. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/_static/equinor_logo_only.jpg +0 -0
  23. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/_templates/layout.html +0 -0
  24. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/conf.py +0 -0
  25. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/explorer.rst +0 -0
  26. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/docs/index.rst +0 -0
  27. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/examples/explorer.ipynb +0 -0
  28. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/examples/explorer2.ipynb +0 -0
  29. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/examples/grids-and-properties.ipynb +0 -0
  30. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/examples/metrics.ipynb +0 -0
  31. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/examples/table-aggregation.ipynb +0 -0
  32. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/pyproject.toml +0 -0
  33. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/setup.cfg +0 -0
  34. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/__init__.py +0 -0
  35. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/__init__.py +0 -0
  36. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/__init__.py +0 -0
  37. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/cache.py +0 -0
  38. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/explorer.py +0 -0
  39. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/filters.py +0 -0
  40. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/__init__.py +0 -0
  41. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/_child.py +0 -0
  42. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/_document.py +0 -0
  43. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/case.py +0 -0
  44. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/cases.py +0 -0
  45. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/cpgrid.py +0 -0
  46. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/cpgrid_property.py +0 -0
  47. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/cube.py +0 -0
  48. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/dictionary.py +0 -0
  49. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/ensemble.py +0 -0
  50. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/ensembles.py +0 -0
  51. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/iteration.py +0 -0
  52. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/iterations.py +0 -0
  53. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/realization.py +0 -0
  54. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/realizations.py +0 -0
  55. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/surface.py +0 -0
  56. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/objects/table.py +0 -0
  57. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu/sumo/explorer/timefilter.py +0 -0
  58. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu_sumo.egg-info/SOURCES.txt +0 -0
  59. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu_sumo.egg-info/dependency_links.txt +0 -0
  60. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu_sumo.egg-info/requires.txt +0 -0
  61. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/src/fmu_sumo.egg-info/top_level.txt +0 -0
  62. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/tests/conftest.py +0 -0
  63. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/tests/context.py +0 -0
  64. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/tests/test_explorer.py +0 -0
  65. {fmu_sumo-2.5.0 → fmu_sumo-2.6.1}/tests/test_objects_table.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fmu-sumo
3
- Version: 2.5.0
3
+ Version: 2.6.1
4
4
  Summary: Python package for interacting with Sumo in an FMU setting
5
5
  Author: Equinor
6
6
  License: Apache License
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '2.5.0'
32
- __version_tuple__ = version_tuple = (2, 5, 0)
31
+ __version__ = version = '2.6.1'
32
+ __version_tuple__ = version_tuple = (2, 6, 1)
33
33
 
34
- __commit_id__ = commit_id = 'g2782fab59'
34
+ __commit_id__ = commit_id = 'gcd5b68d28'
@@ -272,3 +272,56 @@ class Metrics:
272
272
  "percentiles", field=field, percents=percents
273
273
  )
274
274
  )["values"]
275
+
276
+ def _fnv1a_script(self, field):
277
+ return {
278
+ "init_script": """
279
+ state.h = state.count = state.total = 0L;
280
+ """,
281
+ "map_script": f"""
282
+ state.total++;
283
+ if (doc['{field}'].size() == 0) return;
284
+ def s = doc.get('{field}').value;
285
+ long h = -3750763034362895579L;
286
+ for (int i = 0; i < s.length(); i++) {{
287
+ h ^= (long) s.charAt(i);
288
+ h *= 1099511628211L;
289
+ }}
290
+ state.h ^= h;
291
+ state.count++;
292
+ """,
293
+ "combine_script": """
294
+ return state;
295
+ """,
296
+ "reduce_script": """
297
+ long h = 0, c = 0, t = 0;
298
+ for (st in states) {
299
+ h ^= st.h; c += st.count; t += st.total
300
+ }
301
+ return ['checksum': Long.toHexString(h), 'docs_in_checksum': c, 'docs_total': t];
302
+ """,
303
+ }
304
+
305
+ def fnv1a(self, field):
306
+ """Compute the 64-bit FNV-1a checksum for field over the current set of objects.
307
+
308
+ Arguments:
309
+ - field (str): the name of a property in the metadata.
310
+
311
+ Returns:
312
+ - a dict with the keys "checksum", "docs_in_checksum" and "docs_total" (as produced by the reduce script).
313
+ """
314
+ return self._aggregate("scripted_metric", **self._fnv1a_script(field))
315
+
316
+ async def fnv1a_async(self, field):
317
+ """Compute the 64-bit FNV-1a checksum for field over the current set of objects.
318
+
319
+ Arguments:
320
+ - field (str): the name of a property in the metadata.
321
+
322
+ Returns:
323
+ - a dict with the keys "checksum", "docs_in_checksum" and "docs_total" (as produced by the reduce script).
324
+ """
325
+ return await self._aggregate_async(
326
+ "scripted_metric", **self._fnv1a_script(field)
327
+ )
@@ -929,6 +929,26 @@ class SearchContext:
929
929
  """
930
930
  return self.get_field_values(field)
931
931
 
932
+ def match_field_values(self, field: str, patterns: list[str]) -> list[str]:
933
+ query = {
934
+ "query": self._query,
935
+ "size": 0,
936
+ "aggs": {
937
+ "values": {
938
+ "terms": {
939
+ "field": field,
940
+ "include": "|".join(patterns),
941
+ "size": 1000,
942
+ }
943
+ }
944
+ },
945
+ }
946
+ res = self._sumo.post("/search", json=query).json()
947
+ return [
948
+ bucket["key"]
949
+ for bucket in res["aggregations"]["values"]["buckets"]
950
+ ]
951
+
932
952
  async def get_field_values_and_counts_async(
933
953
  self, field: str
934
954
  ) -> Dict[str, int]:
@@ -978,6 +998,28 @@ class SearchContext:
978
998
  """
979
999
  return await self.get_field_values_async(field)
980
1000
 
1001
+ async def match_field_values_async(
1002
+ self, field: str, patterns: list[str]
1003
+ ) -> list[str]:
1004
+ query = {
1005
+ "query": self._query,
1006
+ "size": 0,
1007
+ "aggs": {
1008
+ "values": {
1009
+ "terms": {
1010
+ "field": field,
1011
+ "include": "|".join(patterns),
1012
+ "size": 1000,
1013
+ }
1014
+ }
1015
+ },
1016
+ }
1017
+ res = (await self._sumo.post_async("/search", json=query)).json()
1018
+ return [
1019
+ bucket["key"]
1020
+ for bucket in res["aggregations"]["values"]["buckets"]
1021
+ ]
1022
+
981
1023
  _timestamp_query = {
982
1024
  "bool": {
983
1025
  "must": [{"exists": {"field": "data.time.t0"}}],
@@ -1475,7 +1517,7 @@ class SearchContext:
1475
1517
 
1476
1518
  def __verify_aggregation_operation(
1477
1519
  self, sres
1478
- ) -> Tuple[str, str, str, str]:
1520
+ ) -> Tuple[str, str, str, str, str]:
1479
1521
  tot_hits = sres["hits"]["total"]["value"]
1480
1522
  if tot_hits == 0:
1481
1523
  raise Exception("No matching realizations found.")
@@ -1502,18 +1544,38 @@ class SearchContext:
1502
1544
  "key"
1503
1545
  ]
1504
1546
  classname = sres["aggregations"]["class"]["buckets"][0]["key"]
1505
- return caseuuid, classname, entityuuid, ensemblename
1547
+
1548
+ return caseuuid, classname, entityuuid, ensemblename, tot_hits
1506
1549
 
1507
1550
  def _verify_aggregation_operation(
1508
1551
  self, columns
1509
1552
  ) -> Tuple[str, str, str, str]:
1510
- assert columns is None or len(columns) == 1, (
1511
- "Exactly one column required for collection aggregation."
1512
- )
1513
1553
  sc = self if columns is None else self.filter(column=columns)
1514
1554
  query = sc.__prepare_verify_aggregation_query()
1515
1555
  sres = sc._sumo.post("/search", json=query).json()
1516
- return sc.__verify_aggregation_operation(sres)
1556
+ caseuuid, classname, entityuuid, ensemblename, tot_hits = (
1557
+ sc.__verify_aggregation_operation(sres)
1558
+ )
1559
+
1560
+ if (
1561
+ classname != "surface"
1562
+ and isinstance(columns, list)
1563
+ and len(columns) == 1
1564
+ ):
1565
+ sc = SearchContext(
1566
+ sumo=self._sumo,
1567
+ ).filter(
1568
+ cls=classname,
1569
+ realization=True,
1570
+ entity=entityuuid,
1571
+ ensemble=ensemblename,
1572
+ )
1573
+
1574
+ if len(sc) != tot_hits:
1575
+ raise Exception(
1576
+ "Filtering on realization is not allowed for table and parameter aggregation."
1577
+ )
1578
+ return caseuuid, classname, entityuuid, ensemblename
1517
1579
 
1518
1580
  def __prepare_aggregation_spec(
1519
1581
  self, caseuuid, classname, entityuuid, ensemblename, operation, columns
@@ -1530,7 +1592,9 @@ class SearchContext:
1530
1592
  spec["columns"] = columns
1531
1593
  return spec
1532
1594
 
1533
- def _aggregate(self, columns=None, operation=None) -> objects.Child:
1595
+ def _aggregate(
1596
+ self, columns=None, operation=None, no_wait=False
1597
+ ) -> objects.Child | httpx.Response:
1534
1598
  caseuuid, classname, entityuuid, ensemblename = (
1535
1599
  self._verify_aggregation_operation(columns)
1536
1600
  )
@@ -1544,30 +1608,85 @@ class SearchContext:
1544
1608
  print(ex.response.reason_phrase)
1545
1609
  print(ex.response.text)
1546
1610
  raise ex
1611
+ if no_wait:
1612
+ return res
1613
+ # ELSE
1547
1614
  res = self._sumo.poll(res).json()
1548
1615
  return self._to_sumo(res)
1549
1616
 
1550
- def aggregate(self, columns=None, operation=None) -> objects.Child:
1617
+ def aggregate(
1618
+ self, columns=None, operation=None, no_wait=False
1619
+ ) -> objects.Child | httpx.Response:
1620
+ assert columns is None or len(columns) == 1, (
1621
+ "Exactly one column required for collection aggregation."
1622
+ )
1551
1623
  sc = self.filter(realization=True, column=columns)
1552
1624
  if len(sc.hidden) > 0:
1553
- return sc.hidden._aggregate(columns=columns, operation=operation)
1554
- else:
1555
- return sc.visible._aggregate(columns=columns, operation=operation)
1625
+ sc = sc.hidden
1626
+ return sc._aggregate(
1627
+ columns=columns, operation=operation, no_wait=no_wait
1628
+ )
1629
+
1630
+ def batch_aggregate(self, columns=None, operation=None, no_wait=False):
1631
+ """Aggregate one or more columns for the current context.
1632
+
1633
+ Args:
1634
+ columns: list of column names or regular expressions for column names.
1635
+ operation: must be "collection"
1636
+ no_wait: set to True if the client handles polling itself.
1637
+
1638
+ Returns:
1639
+ the completed aggregation result, or the raw httpx.Response when no_wait is True.
1640
+ """
1641
+ assert operation == "collection"
1642
+ assert type(columns) is list and len(columns) > 0
1643
+ assert len(columns) < 1000, (
1644
+ "Maximum 1000 columns allowed for a single call to batch_aggregate."
1645
+ )
1646
+ sc = self.filter(realization=True, column=columns)
1647
+ if len(sc.hidden) > 0:
1648
+ sc = sc.hidden
1649
+ res = sc._aggregate(columns=columns, operation=operation, no_wait=True)
1650
+ assert type(res) is httpx.Response
1651
+ if no_wait:
1652
+ return res
1653
+ # ELSE
1654
+ return self._sumo.poll(res)
1556
1655
 
1557
1656
  async def _verify_aggregation_operation_async(
1558
1657
  self, columns
1559
1658
  ) -> Tuple[str, str, str, str]:
1560
- assert columns is None or len(columns) == 1, (
1561
- "Exactly one column required for collection aggregation."
1562
- )
1563
1659
  sc = self if columns is None else self.filter(column=columns)
1564
1660
  query = sc.__prepare_verify_aggregation_query()
1565
1661
  sres = (await self._sumo.post_async("/search", json=query)).json()
1566
- return sc.__verify_aggregation_operation(sres)
1662
+ caseuuid, classname, entityuuid, ensemblename, tot_hits = (
1663
+ sc.__verify_aggregation_operation(sres)
1664
+ )
1665
+
1666
+ if (
1667
+ classname != "surface"
1668
+ and isinstance(columns, list)
1669
+ and len(columns) == 1
1670
+ ):
1671
+ sc = SearchContext(
1672
+ sumo=self._sumo,
1673
+ ).filter(
1674
+ cls=classname,
1675
+ realization=True,
1676
+ entity=entityuuid,
1677
+ ensemble=ensemblename,
1678
+ )
1679
+
1680
+ tot_reals = await sc.length_async()
1681
+ if tot_reals != tot_hits:
1682
+ raise Exception(
1683
+ "Filtering on realization is not allowed for table and parameter aggregation."
1684
+ )
1685
+ return caseuuid, classname, entityuuid, ensemblename
1567
1686
 
1568
1687
  async def _aggregate_async(
1569
- self, columns=None, operation=None
1570
- ) -> objects.Child:
1688
+ self, columns=None, operation=None, no_wait=False
1689
+ ) -> objects.Child | httpx.Response:
1571
1690
  (
1572
1691
  caseuuid,
1573
1692
  classname,
@@ -1584,24 +1703,59 @@ class SearchContext:
1584
1703
  print(ex.response.reason_phrase)
1585
1704
  print(ex.response.text)
1586
1705
  raise ex
1706
+ if no_wait:
1707
+ return res
1708
+ # ELSE
1587
1709
  res = (await self._sumo.poll_async(res)).json()
1588
1710
  return self._to_sumo(res)
1589
1711
 
1590
1712
  async def aggregate_async(
1591
- self, columns=None, operation=None
1592
- ) -> objects.Child:
1713
+ self, columns=None, operation=None, no_wait=False
1714
+ ) -> objects.Child | httpx.Response:
1715
+ assert columns is None or len(columns) == 1, (
1716
+ "Exactly one column required for collection aggregation."
1717
+ )
1593
1718
  sc = self.filter(realization=True, column=columns)
1594
1719
  length_hidden = await sc.hidden.length_async()
1595
1720
  if length_hidden > 0:
1596
- return await sc.hidden._aggregate_async(
1597
- columns=columns, operation=operation
1598
- )
1599
- else:
1600
- return await sc.visible._aggregate_async(
1601
- columns=columns, operation=operation
1602
- )
1721
+ sc = sc.hidden
1722
+ return await sc._aggregate_async(
1723
+ columns=columns, operation=operation, no_wait=no_wait
1724
+ )
1725
+
1726
+ async def batch_aggregate_async(
1727
+ self, columns=None, operation=None, no_wait=False
1728
+ ):
1729
+ """Aggregate one or more columns for the current context.
1730
+
1731
+ Args:
1732
+ columns: list of column names or regular expressions for column names.
1733
+ operation: must be "collection"
1734
+ no_wait: set to True if the client handles polling itself.
1735
+
1736
+ Returns:
1737
+ the completed aggregation result, or the raw httpx.Response when no_wait is True.
1738
+ """
1739
+ assert operation == "collection"
1740
+ assert type(columns) is list and len(columns) > 0
1741
+ assert len(columns) < 1000, (
1742
+ "Maximum 1000 columns allowed for a single call to batch_aggregate_async."
1743
+ )
1744
+ sc = self.filter(realization=True, column=columns)
1745
+ if len(sc.hidden) > 0:
1746
+ sc = sc.hidden
1747
+ res = await sc._aggregate_async(
1748
+ columns=columns, operation=operation, no_wait=True
1749
+ )
1750
+ assert type(res) is httpx.Response
1751
+ if no_wait:
1752
+ return res
1753
+ # ELSE
1754
+ return await self._sumo.poll_async(res)
1603
1755
 
1604
- def aggregation(self, column=None, operation=None) -> objects.Child:
1756
+ def aggregation(
1757
+ self, column=None, operation=None, no_wait=False
1758
+ ) -> objects.Child | httpx.Response:
1605
1759
  assert operation is not None
1606
1760
  assert column is None or isinstance(column, str)
1607
1761
  sc = self.filter(aggregation=operation, column=column)
@@ -1623,11 +1777,12 @@ class SearchContext:
1623
1777
  return self.filter(realization=True).aggregate(
1624
1778
  columns=[column] if column is not None else None,
1625
1779
  operation=operation,
1780
+ no_wait=no_wait,
1626
1781
  )
1627
1782
 
1628
1783
  async def aggregation_async(
1629
- self, column=None, operation=None
1630
- ) -> objects.Child:
1784
+ self, column=None, operation=None, no_wait=False
1785
+ ) -> objects.Child | httpx.Response:
1631
1786
  assert operation is not None
1632
1787
  assert column is None or isinstance(column, str)
1633
1788
  sc = self.filter(aggregation=operation, column=column)
@@ -1652,6 +1807,7 @@ class SearchContext:
1652
1807
  return await self.filter(realization=True).aggregate_async(
1653
1808
  columns=[column] if column is not None else None,
1654
1809
  operation=operation,
1810
+ no_wait=no_wait,
1655
1811
  )
1656
1812
 
1657
1813
  @deprecation.deprecated(
@@ -28,7 +28,11 @@ class Polygons(Child):
28
28
  import pandas as pd
29
29
 
30
30
  try:
31
- return pd.read_csv(self.blob)
31
+ if self.format == "csv":
32
+ return pd.read_csv(self.blob)
33
+ if self.format == "parquet":
34
+ return pd.read_parquet(self.blob)
35
+ raise TypeError()
32
36
  except TypeError as type_err:
33
37
  raise TypeError(f"Unknown format: {self.format}") from type_err
34
38
 
@@ -42,6 +46,10 @@ class Polygons(Child):
42
46
  import pandas as pd
43
47
 
44
48
  try:
45
- return pd.read_csv(await self.blob_async)
49
+ if self.format == "csv":
50
+ return pd.read_csv(await self.blob_async)
51
+ if self.format == "parquet":
52
+ return pd.read_parquet(await self.blob_async)
53
+ raise TypeError()
46
54
  except TypeError as type_err:
47
55
  raise TypeError(f"Unknown format: {self.format}") from type_err
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fmu-sumo
3
- Version: 2.5.0
3
+ Version: 2.6.1
4
4
  Summary: Python package for interacting with Sumo in an FMU setting
5
5
  Author: Equinor
6
6
  License: Apache License
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes