resqpy 4.14.2__py3-none-any.whl → 5.1.6__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (67) hide show
  1. resqpy/__init__.py +1 -1
  2. resqpy/fault/_gcs_functions.py +10 -10
  3. resqpy/fault/_grid_connection_set.py +277 -113
  4. resqpy/grid/__init__.py +2 -3
  5. resqpy/grid/_defined_geometry.py +3 -3
  6. resqpy/grid/_extract_functions.py +8 -2
  7. resqpy/grid/_grid.py +95 -12
  8. resqpy/grid/_grid_types.py +22 -7
  9. resqpy/grid/_points_functions.py +1 -1
  10. resqpy/grid/_regular_grid.py +6 -2
  11. resqpy/grid_surface/__init__.py +17 -38
  12. resqpy/grid_surface/_blocked_well_populate.py +5 -5
  13. resqpy/grid_surface/_find_faces.py +1413 -253
  14. resqpy/lines/_polyline.py +24 -33
  15. resqpy/model/_catalogue.py +9 -0
  16. resqpy/model/_forestry.py +18 -14
  17. resqpy/model/_hdf5.py +11 -3
  18. resqpy/model/_model.py +85 -10
  19. resqpy/model/_xml.py +38 -13
  20. resqpy/multi_processing/wrappers/grid_surface_mp.py +92 -37
  21. resqpy/olio/read_nexus_fault.py +8 -2
  22. resqpy/olio/relperm.py +1 -1
  23. resqpy/olio/transmission.py +8 -8
  24. resqpy/olio/triangulation.py +36 -30
  25. resqpy/olio/vector_utilities.py +340 -6
  26. resqpy/olio/volume.py +0 -20
  27. resqpy/olio/wellspec_keywords.py +19 -13
  28. resqpy/olio/write_hdf5.py +1 -1
  29. resqpy/olio/xml_et.py +12 -0
  30. resqpy/property/__init__.py +6 -4
  31. resqpy/property/_collection_add_part.py +4 -3
  32. resqpy/property/_collection_create_xml.py +4 -2
  33. resqpy/property/_collection_get_attributes.py +4 -0
  34. resqpy/property/attribute_property_set.py +311 -0
  35. resqpy/property/grid_property_collection.py +11 -11
  36. resqpy/property/property_collection.py +79 -31
  37. resqpy/property/property_common.py +3 -8
  38. resqpy/rq_import/_add_surfaces.py +34 -14
  39. resqpy/rq_import/_grid_from_cp.py +2 -2
  40. resqpy/rq_import/_import_nexus.py +75 -48
  41. resqpy/rq_import/_import_vdb_all_grids.py +64 -52
  42. resqpy/rq_import/_import_vdb_ensemble.py +12 -13
  43. resqpy/surface/_mesh.py +4 -0
  44. resqpy/surface/_surface.py +593 -118
  45. resqpy/surface/_tri_mesh.py +13 -10
  46. resqpy/surface/_tri_mesh_stencil.py +4 -4
  47. resqpy/surface/_triangulated_patch.py +71 -51
  48. resqpy/time_series/_any_time_series.py +7 -4
  49. resqpy/time_series/_geologic_time_series.py +1 -1
  50. resqpy/unstructured/_hexa_grid.py +6 -2
  51. resqpy/unstructured/_prism_grid.py +13 -5
  52. resqpy/unstructured/_pyramid_grid.py +6 -2
  53. resqpy/unstructured/_tetra_grid.py +6 -2
  54. resqpy/unstructured/_unstructured_grid.py +6 -2
  55. resqpy/well/_blocked_well.py +1986 -1946
  56. resqpy/well/_deviation_survey.py +3 -3
  57. resqpy/well/_md_datum.py +11 -21
  58. resqpy/well/_trajectory.py +10 -5
  59. resqpy/well/_wellbore_frame.py +10 -2
  60. resqpy/well/blocked_well_frame.py +3 -3
  61. resqpy/well/well_object_funcs.py +7 -9
  62. resqpy/well/well_utils.py +33 -0
  63. {resqpy-4.14.2.dist-info → resqpy-5.1.6.dist-info}/METADATA +8 -9
  64. {resqpy-4.14.2.dist-info → resqpy-5.1.6.dist-info}/RECORD +66 -66
  65. {resqpy-4.14.2.dist-info → resqpy-5.1.6.dist-info}/WHEEL +1 -1
  66. resqpy/grid/_moved_functions.py +0 -15
  67. {resqpy-4.14.2.dist-info → resqpy-5.1.6.dist-info}/LICENSE +0 -0
@@ -85,10 +85,10 @@ class PropertyCollection():
85
85
  self.realization = realization # model realization number within an ensemble
86
86
  self.null_value = None
87
87
  self.imported_list = []
88
- # above is list of (uuid, file_name, keyword, cached_name, discrete, uom, time_index, null_value,
88
+ # above is list of (uuid, source, keyword, cached_name, discrete, uom, time_index, null_value,
89
89
  # min_value, max_value, property_kind, facet_type, facet, realization,
90
90
  # indexable_element, count, local_property_kind_uuid, const_value, points,
91
- # time_series_uuid, string_lookup_uuid)
91
+ # time_series_uuid, string_lookup_uuid, pre_packed)
92
92
  self.guess_warning = False
93
93
  if support is not None:
94
94
  self.model = support.model
@@ -141,20 +141,28 @@ class PropertyCollection():
141
141
  else:
142
142
  pcs._set_support_uuid_notnone(self, support, support_uuid, model, modify_parts)
143
143
 
144
- def supporting_shape(self, indexable_element = None, direction = None):
144
+ def supporting_shape(self,
145
+ indexable_element = None,
146
+ direction = None,
147
+ count = 1,
148
+ points = False,
149
+ pre_packed = False):
145
150
  """Return the shape of the supporting representation with respect to the given indexable element
146
151
 
147
152
  arguments:
148
153
  indexable_element (string, optional): if None, a hard-coded default depending on the supporting representation class
149
154
  will be used
150
155
  direction (string, optional): must be passed if required for the combination of support class and indexable element;
151
- currently only used for Grid faces.
156
+ currently only used for Grid faces
157
+ count (int, default 1): the count parameter for the property
158
+ points (bool, default False): set True if the property is a points property
159
+ pre_packed (bool, default False): set True if the required shape is for a pre-packed boolean property
152
160
 
153
161
  returns:
154
162
  list of int, being required shape of numpy array, or None if not coded for
155
163
 
156
164
  note:
157
- individual property arrays will only match this shape if they have the same indexable element and a count of one
165
+ individual property arrays will only match this shape if they have the same indexable element and matching count etc.
158
166
  """
159
167
 
160
168
  # when at global level was causing circular reference loading issues as grid imports this module
@@ -204,6 +212,14 @@ class PropertyCollection():
204
212
  else:
205
213
  raise Exception(f'unsupported support class {type(support)} for property')
206
214
 
215
+ if pre_packed:
216
+ shape_list[-1] = (shape_list[-1] - 1) // 8 + 1
217
+
218
+ if shape_list is not None:
219
+ if count > 1:
220
+ shape_list.append(count)
221
+ if points:
222
+ shape_list.append(3)
207
223
  return shape_list
208
224
 
209
225
  def populate_from_property_set(self, property_set_root):
@@ -404,7 +420,8 @@ class PropertyCollection():
404
420
  call this method once for each group of differently sized properties; for very large collections
405
421
  it might also be necessary to divide the work into smaller groups to reduce memory usage;
406
422
  this method does not write to hdf5 nor create xml – use the usual methods for further processing
407
- of the imported list
423
+ of the imported list;
424
+ does not currently support packed arrays
408
425
  """
409
426
 
410
427
  source = 'sampled'
@@ -743,7 +760,8 @@ class PropertyCollection():
743
760
  title = None,
744
761
  title_mode = None,
745
762
  related_uuid = None,
746
- const_value = None):
763
+ const_value = None,
764
+ extra = None):
747
765
  """Returns a single part selected by those arguments which are not None.
748
766
 
749
767
  multiple_handling (string, default 'exception'): one of 'exception', 'none', 'first', 'oldest', 'newest'
@@ -782,7 +800,8 @@ class PropertyCollection():
782
800
  title = title,
783
801
  title_mode = title_mode,
784
802
  related_uuid = related_uuid,
785
- const_value = const_value)
803
+ const_value = const_value,
804
+ extra = extra)
786
805
  parts_list = temp_collection.parts()
787
806
  if len(parts_list) == 0:
788
807
  return None
@@ -815,7 +834,8 @@ class PropertyCollection():
815
834
  title = None,
816
835
  title_mode = None,
817
836
  related_uuid = None,
818
- use_pack = True):
837
+ use_pack = True,
838
+ extra = None):
819
839
  """Returns the array of data for a single part selected by those arguments which are not None.
820
840
 
821
841
  arguments:
@@ -832,7 +852,7 @@ class PropertyCollection():
832
852
 
833
853
  Other optional arguments:
834
854
  realization, support_uuid, continuous, points, count, indexable, property_kind, facet_type, facet,
835
- citation_title, time_series_uuid, time_index, uom, string_lookup_id, categorical, related_uuid:
855
+ citation_title, time_series_uuid, time_index, uom, string_lookup_id, categorical, related_uuid, extra:
836
856
 
837
857
  For each of these arguments: if None, then all members of collection pass this filter;
838
858
  if not None then only those members with the given value pass this filter;
@@ -868,7 +888,8 @@ class PropertyCollection():
868
888
  multiple_handling = multiple_handling,
869
889
  title = title,
870
890
  title_mode = title_mode,
871
- related_uuid = related_uuid)
891
+ related_uuid = related_uuid,
892
+ extra = extra)
872
893
  if part is None:
873
894
  return None
874
895
  return self.cached_part_array_ref(part,
@@ -1085,17 +1106,17 @@ class PropertyCollection():
1085
1106
  return meta
1086
1107
 
1087
1108
  def null_value_for_part(self, part):
1088
- """Returns the null value for the (discrete) property part; np.NaN for continuous parts.
1109
+ """Returns the null value for the (discrete) property part; np.nan for continuous parts.
1089
1110
 
1090
1111
  arguments:
1091
1112
  part (string): the part name for which the null value is required
1092
1113
 
1093
1114
  returns:
1094
- int or np.NaN
1115
+ int or np.nan
1095
1116
  """
1096
1117
 
1097
1118
  if self.continuous_for_part(part):
1098
- return np.NaN
1119
+ return np.nan
1099
1120
  return self.element_for_part(part, 19)
1100
1121
 
1101
1122
  def continuous_for_part(self, part):
@@ -1300,6 +1321,12 @@ class PropertyCollection():
1300
1321
 
1301
1322
  return [self.citation_title_for_part(p) for p in self.parts()]
1302
1323
 
1324
+ def source_for_part(self, part):
1325
+ """Returns the source string from the part's extra metadata, if present, else None."""
1326
+
1327
+ assert self.model is not None
1328
+ return self.model.source_for_part(part)
1329
+
1303
1330
  def time_series_uuid_for_part(self, part):
1304
1331
  """If the property has an associated time series (is not static), returns the uuid for the time series.
1305
1332
 
@@ -1677,7 +1704,7 @@ class PropertyCollection():
1677
1704
  exclude_inactive (boolean, default True): elements which are flagged as inactive in the supporting representation
1678
1705
  are masked out if this argument is True
1679
1706
  exclude_value (float or int, optional): if present, elements which match this value are masked out; if not None
1680
- then usually set to np.NaN for continuous data or null_value_for_part() for discrete data
1707
+ then usually set to np.nan for continuous data or null_value_for_part() for discrete data
1681
1708
  points (boolean, default False): if True, the simple array is expected to have an extra dimension of extent 3,
1682
1709
  relative to the inactive attribute of the support
1683
1710
 
@@ -1765,7 +1792,7 @@ class PropertyCollection():
1765
1792
  representation object with the attribute name 'inactive', to multiple properties (this will only work
1766
1793
  if the indexable element is set to the typical value for the class of supporting representation, eg.
1767
1794
  'cells' for grid objects); if exclude_null is set True then null value elements will also be masked out
1768
- (as long as masked is True); however, it is recommended simply to use np.NaN values in floating point
1795
+ (as long as masked is True); however, it is recommended simply to use np.nan values in floating point
1769
1796
  property arrays if the commonality is not needed;
1770
1797
  set use_pack True if the hdf5 data may have been written with a similar setting
1771
1798
 
@@ -1783,8 +1810,10 @@ class PropertyCollection():
1783
1810
  if masked:
1784
1811
  exclude_value = self.null_value_for_part(part) if exclude_null else None
1785
1812
  return self.masked_array(self.__dict__[cached_array_name], exclude_value = exclude_value)
1786
- else:
1813
+ elif dtype is None:
1787
1814
  return self.__dict__[cached_array_name]
1815
+ else:
1816
+ return self.__dict__[cached_array_name].astype(dtype)
1788
1817
 
1789
1818
  def h5_slice(self, part, slice_tuple):
1790
1819
  """Returns a subset of the array for part, without loading the whole array.
@@ -1860,7 +1889,7 @@ class PropertyCollection():
1860
1889
  shape = self.supporting_shape(indexable_element = self.indexable_for_part(part),
1861
1890
  direction = pcga._part_direction(self, part))
1862
1891
  assert shape is not None
1863
- return shape, (float if self.continuous_for_part(part) else int)
1892
+ return tuple(shape), (float if self.continuous_for_part(part) else int)
1864
1893
 
1865
1894
  h5_key_pair = self._shape_and_type_of_part_get_h5keypair(part, part_node, model)
1866
1895
  if h5_key_pair is None:
@@ -1942,7 +1971,7 @@ class PropertyCollection():
1942
1971
  the maximum realization number present and slices for any missing realizations will be filled with fill_value;
1943
1972
  if False, the extent of the first axis will only cover the number of realizations actually present (see also notes)
1944
1973
  fill_value (int or float, optional): the value to use for missing realization slices; if None, will default to
1945
- np.NaN if data is continuous, -1 otherwise; irrelevant if fill_missing is False
1974
+ np.nan if data is continuous, -1 otherwise; irrelevant if fill_missing is False
1946
1975
  indexable_element (string, optional): the indexable element for the properties in the collection; if None, will
1947
1976
  be determined from the data
1948
1977
 
@@ -1962,7 +1991,7 @@ class PropertyCollection():
1962
1991
  r_list, continuous = pcga._realizations_array_ref_initial_checks(self)
1963
1992
 
1964
1993
  if fill_value is None:
1965
- fill_value = np.NaN if continuous else -1
1994
+ fill_value = np.nan if continuous else -1
1966
1995
  if indexable_element is None:
1967
1996
  indexable_element = self.indexable_for_part(self.parts()[0])
1968
1997
 
@@ -1991,7 +2020,7 @@ class PropertyCollection():
1991
2020
  the maximum time index present and slices for any missing indices will be filled with fill_value; if False,
1992
2021
  the extent of the first axis will only cover the number of time indices actually present (see also notes)
1993
2022
  fill_value (int or float, optional): the value to use for missing time index slices; if None, will default to
1994
- np.NaN if data is continuous, -1 otherwise; irrelevant if fill_missing is False
2023
+ np.nan if data is continuous, -1 otherwise; irrelevant if fill_missing is False
1995
2024
  indexable_element (string, optional): the indexable element for the properties in the collection; if None, will
1996
2025
  be determined from the data
1997
2026
 
@@ -2012,7 +2041,7 @@ class PropertyCollection():
2012
2041
  ti_list, continuous = pcga._time_array_ref_initial_checks(self)
2013
2042
 
2014
2043
  if fill_value is None:
2015
- fill_value = np.NaN if continuous else -1
2044
+ fill_value = np.nan if continuous else -1
2016
2045
 
2017
2046
  if indexable_element is None:
2018
2047
  indexable_element = self.indexable_for_part(self.parts()[0])
@@ -2213,7 +2242,8 @@ class PropertyCollection():
2213
2242
  const_value = None,
2214
2243
  points = False,
2215
2244
  time_series_uuid = None,
2216
- string_lookup_uuid = None):
2245
+ string_lookup_uuid = None,
2246
+ pre_packed = False):
2217
2247
  """Caches array and adds to the list of imported properties (but not to the collection dict).
2218
2248
 
2219
2249
  arguments:
@@ -2244,6 +2274,7 @@ class PropertyCollection():
2244
2274
  be provided when writing hdf5 and creating xml for the imported list
2245
2275
  string_lookup_uuid (UUID, optional): should be provided for categorical properties, though can alternatively
2246
2276
  be specified when creating xml
2277
+ pre_packed (bool, default False): set to True if the property is boolean and the array is already packed
2247
2278
 
2248
2279
  returns:
2249
2280
  uuid of nascent property object
@@ -2265,6 +2296,7 @@ class PropertyCollection():
2265
2296
  assert (cached_array is not None and const_value is None) or (cached_array is None and const_value is not None)
2266
2297
  assert not points or not discrete
2267
2298
  assert count > 0
2299
+ assert (not pre_packed) or ((cached_array is not None) and (cached_array.dtype == np.uint8))
2268
2300
  rqp_c.check_and_warn_property_kind(property_kind, 'adding property to imported list')
2269
2301
 
2270
2302
  if self.imported_list is None:
@@ -2273,16 +2305,25 @@ class PropertyCollection():
2273
2305
  uuid = bu.new_uuid()
2274
2306
  cached_name = rqp_c._cache_name_for_uuid(uuid)
2275
2307
  if cached_array is not None:
2308
+ direction = facet if facet_type == 'direction' else None
2309
+ shape = self.supporting_shape(indexable_element = indexable_element,
2310
+ direction = direction,
2311
+ count = count,
2312
+ points = points,
2313
+ pre_packed = pre_packed)
2314
+ assert shape is not None, f'unsupported indexable element {indexable_element} for supporting representation'
2315
+ assert cached_array.shape == tuple(
2316
+ shape), f'property array has shape {cached_array.shape} when expecting {tuple(shape)}'
2276
2317
  min_value, max_value = pcga._min_max_of_cached_array(self, cached_name, cached_array, null_value, discrete)
2277
2318
  else:
2278
- if const_value == null_value or (not discrete and np.isnan(const_value)):
2319
+ if const_value == null_value or isinstance(const_value, bool) or (not discrete and np.isnan(const_value)):
2279
2320
  min_value = max_value = None
2280
2321
  else:
2281
2322
  min_value = max_value = const_value
2282
2323
  self.imported_list.append(
2283
2324
  (uuid, source_info, keyword, cached_name, discrete, uom, time_index, null_value, min_value, max_value,
2284
2325
  property_kind, facet_type, facet, realization, indexable_element, count, local_property_kind_uuid,
2285
- const_value, points, time_series_uuid, string_lookup_uuid))
2326
+ const_value, points, time_series_uuid, string_lookup_uuid, pre_packed))
2286
2327
  return uuid
2287
2328
 
2288
2329
  def add_similar_to_imported_list(self,
@@ -2305,6 +2346,7 @@ class PropertyCollection():
2305
2346
  points = None,
2306
2347
  time_series_uuid = None,
2307
2348
  string_lookup_uuid = None,
2349
+ pre_packed = False,
2308
2350
  similar_model = None,
2309
2351
  title = None):
2310
2352
  """Caches array and adds to the list of imported properties using default metadata from a similar property.
@@ -2336,6 +2378,7 @@ class PropertyCollection():
2336
2378
  be provided when writing hdf5 and creating xml for the imported list
2337
2379
  string_lookup_uuid (UUID, optional): should be provided for categorical properties, though can alternatively
2338
2380
  be specified when creating xml
2381
+ pre_packed (bool, default False): set to True if the property is boolean and the cached array is packed
2339
2382
  similar_model (Model, optional): the model where the similar property resides, if not the same as this
2340
2383
  property collection
2341
2384
  title (str, optional): synonym for keyword argument
@@ -2392,6 +2435,7 @@ class PropertyCollection():
2392
2435
  args['string_lookup_uuid'] = get_arg(time_series_uuid, similar.string_lookup_uuid())
2393
2436
  em = similar.extra_metadata if hasattr(similar, 'extra_metadata') else {}
2394
2437
  args['source_info'] = get_arg(source_info, em.get('source'))
2438
+ args['pre_packed'] = pre_packed
2395
2439
 
2396
2440
  return self.add_cached_array_to_imported_list(cached_array, **args)
2397
2441
 
@@ -2439,7 +2483,8 @@ class PropertyCollection():
2439
2483
  as 32 bit; if None, the system default is to write as 32 bit; if True, 32 bit is used; if
2440
2484
  False, 64 bit data is written; ignored if dtype is not None
2441
2485
  use_pack (bool, default False): if True, bool arrays will be packed along their last axis; this
2442
- will generally result in hdf5 data that is not readable by non-resqpy applications
2486
+ will generally result in hdf5 data that is not readable by non-resqpy applications; leave
2487
+ as False for already packed arrays
2443
2488
  chunks (str, optional): if not None, one of 'auto', 'all', or 'slice', controlling hdf5 chunks
2444
2489
  compression (str, optional): if not None, one of 'gzip' or 'lzf' being the hdf5 compression
2445
2490
  algorithm to be used; gzip gives better compression ratio but is slower
@@ -2467,8 +2512,8 @@ class PropertyCollection():
2467
2512
  uuid = entry[0]
2468
2513
  cached_name = entry[3]
2469
2514
  tail = 'points_patch0' if entry[18] else 'values_patch0'
2470
- if use_pack and (str(dtype).startswith('bool') or
2471
- (dtype is None and str(self.__dict__[cached_name].dtype) == 'bool')):
2515
+ if use_pack and ('bool' in str(dtype) or
2516
+ (dtype is None and 'bool' in str(self.__dict__[cached_name].dtype))):
2472
2517
  dtype = 'pack'
2473
2518
  h5_reg.register_dataset(uuid, tail, self.__dict__[cached_name], dtype = dtype)
2474
2519
  h5_reg.write(file = file_name, mode = mode, use_int32 = use_int32)
@@ -2595,7 +2640,8 @@ class PropertyCollection():
2595
2640
  points = False,
2596
2641
  extra_metadata = {},
2597
2642
  const_value = None,
2598
- expand_const_arrays = False):
2643
+ expand_const_arrays = False,
2644
+ pre_packed = False):
2599
2645
  """Create a property xml node for a single property related to a given supporting representation node.
2600
2646
 
2601
2647
  arguments:
@@ -2652,9 +2698,11 @@ class PropertyCollection():
2652
2698
  must cycle fastest in the array, ie. be the last index
2653
2699
  points (bool, default False): if True, this is a points property
2654
2700
  extra_metadata (dictionary, optional): if present, adds extra metadata in the xml
2655
- const_value (float or int, optional): if present, create xml for a constant array filled with this value
2701
+ const_value (float, int or bool, optional): if present, create xml for a constant array filled with this value
2656
2702
  expand_const_arrays (boolean, default False): if True, the hdf5 write must also have been called with the
2657
2703
  same argument and the xml will treat a constant array as a normal array
2704
+ pre_packed (boolean, default False): if True, the property is a boolean property and the array has already
2705
+ been packed into bits
2658
2706
 
2659
2707
  returns:
2660
2708
  the newly created property xml node
@@ -2682,7 +2730,7 @@ class PropertyCollection():
2682
2730
  direction = None if facet_type is None or facet_type != 'direction' else facet
2683
2731
 
2684
2732
  if self.support is not None:
2685
- pcxml._check_shape_list(self, indexable_element, direction, property_array, points, count)
2733
+ pcxml._check_shape_list(self, indexable_element, direction, property_array, points, count, pre_packed)
2686
2734
 
2687
2735
  # todo: assertions:
2688
2736
  # numpy data type matches discrete flag (and assumptions about precision)
@@ -12,18 +12,13 @@ import numpy as np
12
12
  import resqpy.property as rqp
13
13
  import resqpy.olio.uuid as bu
14
14
  import resqpy.olio.xml_et as rqet
15
- import resqpy.weights_and_measures as bwam
15
+ import resqpy.weights_and_measures as wam
16
16
 
17
17
  # the following resqml property kinds and facet types are 'known about' by this module in relation to nexus
18
18
  # other property kinds should be handled okay but without any special treatment
19
19
  # see property_kind_and_facet_from_keyword() for simulator keyword to property kind and facet mapping
20
20
 
21
- supported_property_kind_list = [
22
- 'continuous', 'discrete', 'categorical', 'code', 'index', 'depth', 'rock volume', 'pore volume', 'volume',
23
- 'thickness', 'length', 'cell length', 'area', 'net to gross ratio', 'porosity', 'permeability thickness',
24
- 'permeability length', 'permeability rock', 'rock permeability', 'fluid volume', 'transmissibility', 'pressure',
25
- 'saturation', 'solution gas-oil ratio', 'vapor oil-gas ratio', 'property multiplier', 'thermodynamic temperature'
26
- ]
21
+ supported_property_kind_list = list(wam.valid_property_kinds())
27
22
 
28
23
  supported_local_property_kind_list = [
29
24
  'active', 'transmissibility multiplier', 'fault transmissibility', 'mat transmissibility'
@@ -326,7 +321,7 @@ def infer_property_kind(name, unit):
326
321
 
327
322
  # Currently unit is ignored
328
323
 
329
- valid_kinds = bwam.valid_property_kinds()
324
+ valid_kinds = wam.valid_property_kinds()
330
325
 
331
326
  if name in valid_kinds:
332
327
  kind = name
@@ -15,14 +15,16 @@ import resqpy.surface as rqs
15
15
 
16
16
 
17
17
  def add_surfaces(
18
- epc_file, # existing resqml model
19
- crs_uuid = None, # optional crs uuid, defaults to crs associated with model (usually main grid crs)
20
- surface_file_format = 'zmap', # zmap, rms (roxar) or GOCAD-Tsurf only formats currently supported
21
- rq_class = 'surface', # 'surface' or 'mesh': the class of object to be created
22
- surface_role = 'map', # 'map' or 'pick'
23
- quad_triangles = False, # if True, 4 triangles per quadrangle will be used for mesh formats, otherwise 2
24
- surface_file_list = None, # list of full file names (paths), each holding one surface
25
- make_horizon_interpretations_and_features = True): # if True, feature and interpretation objects are created
18
+ epc_file, # existing resqml model
19
+ crs_uuid = None, # optional crs uuid, defaults to crs associated with model (usually main grid crs)
20
+ surface_file_format = 'zmap', # zmap, rms (roxar) or GOCAD-Tsurf only formats currently supported
21
+ rq_class = 'surface', # 'surface' or 'mesh': the class of object to be created
22
+ surface_role = 'map', # 'map' or 'pick'
23
+ quad_triangles = False, # if True, 4 triangles per quadrangle will be used for mesh formats, otherwise 2
24
+ surface_file_list = None, # list of full file names (paths), each holding one surface
25
+ make_horizon_interpretations_and_features = True, # if True, feature and interpretation objects are created
26
+ interpretation_type = 'horizon',
27
+ fault_is_normal = True):
26
28
  """Process a list of surface files, adding each surface as a new part in the resqml model.
27
29
 
28
30
  Arguments:
@@ -34,6 +36,8 @@ def add_surfaces(
34
36
  quad_triangles (bool, default False): if True, 4 triangles per quadrangle will be used for mesh formats, otherwise 2
35
37
  surface_file_list (list, default None): list of full file names (paths), each holding one surface
36
38
  make_horizon_interpretations_and_features (bool, default True): if True, feature and interpretation objects are created
39
+ interpretation_type (str, default 'horizon'): if 'make_horizon_interpretations_and_features' is True, feature and interpretation objects are added. Default is 'horizon', other options are 'fault' and 'geobody'
40
+ fault_is_normal (bool, default True): if 'interpretation_type' is 'fault', define if the fault is a normal fault. Default True
37
41
 
38
42
  Returns:
39
43
  resqml model object with added surfaces
@@ -42,13 +46,15 @@ def add_surfaces(
42
46
  assert surface_file_list, 'surface file list is empty or missing'
43
47
  assert surface_file_format in ['zmap', 'rms', 'roxar',
44
48
  'GOCAD-Tsurf'], 'unsupported surface file format: ' + str(surface_file_format)
49
+ assert interpretation_type in ['horizon', 'fault', 'geobody']
45
50
  rq_class = _get_rq_class(rq_class)
46
51
 
47
52
  model, crs_uuid = _get_model_details(epc_file, crs_uuid)
48
53
 
49
54
  for surf_file in surface_file_list:
50
55
  model = _add_single_surface(model, surf_file, surface_file_format, surface_role, quad_triangles, crs_uuid,
51
- rq_class, make_horizon_interpretations_and_features)
56
+ rq_class, make_horizon_interpretations_and_features, interpretation_type,
57
+ fault_is_normal)
52
58
 
53
59
  # mark model as modified
54
60
  model.set_modified()
@@ -61,7 +67,7 @@ def add_surfaces(
61
67
 
62
68
 
63
69
  def _add_single_surface(model, surf_file, surface_file_format, surface_role, quad_triangles, crs_uuid, rq_class,
64
- make_horizon_interpretations_and_features):
70
+ make_horizon_interpretations_and_features, interpretation_type, fault_is_normal):
65
71
  _, short_name = os.path.split(surf_file)
66
72
  dot = short_name.rfind('.')
67
73
  if dot > 0:
@@ -106,10 +112,24 @@ def _add_single_surface(model, surf_file, surface_file_format, surface_role, qua
106
112
  surface.write_hdf5()
107
113
 
108
114
  if make_horizon_interpretations_and_features:
109
- feature = rqo.GeneticBoundaryFeature(model, kind = 'horizon', feature_name = short_name)
110
- feature.create_xml()
111
- interp = rqo.HorizonInterpretation(model, genetic_boundary_feature = feature, domain = 'depth')
112
- interp_root = interp.create_xml()
115
+ if interpretation_type == 'horizon':
116
+ feature = rqo.GeneticBoundaryFeature(model, kind = 'horizon', feature_name = short_name)
117
+ feature.create_xml()
118
+ interp = rqo.HorizonInterpretation(model, genetic_boundary_feature = feature, domain = 'depth')
119
+ interp_root = interp.create_xml()
120
+ elif interpretation_type == 'fault':
121
+ feature = rqo.TectonicBoundaryFeature(model, kind = 'fault', feature_name = short_name)
122
+ feature.create_xml()
123
+ interp = rqo.FaultInterpretation(model,
124
+ tectonic_boundary_feature = feature,
125
+ domain = 'depth',
126
+ is_normal = fault_is_normal)
127
+ interp_root = interp.create_xml()
128
+ else:
129
+ feature = rqo.GeobodyFeature(model, feature_name = short_name)
130
+ feature.create_xml()
131
+ interp = rqo.GeobodyInterpretation(model, geobody_feature = feature, domain = 'depth')
132
+ interp_root = interp.create_xml()
113
133
  surface.set_represented_interpretation_root(interp_root)
114
134
 
115
135
  surface.create_xml(add_as_part = True, add_relationships = True, title = short_name, originator = None)
@@ -511,8 +511,8 @@ class _GridFromCp:
511
511
  assert len(where_defined) == 3 and len(where_defined[0]) > 0, 'no extant cell geometries'
512
512
  sample_kji0 = (where_defined[0][0], where_defined[1][0], where_defined[2][0])
513
513
  sample_cp = self.__cp_array[sample_kji0]
514
- self.__cell_ijk_lefthanded = (vec.clockwise(sample_cp[0, 0, 0], sample_cp[0, 1, 0], sample_cp[0, 0, 1]) >=
515
- 0.0)
514
+ self.__cell_ijk_lefthanded = \
515
+ (vec.clockwise(sample_cp[0, 0, 0], sample_cp[0, 1, 0], sample_cp[0, 0, 1]) >= 0.0)
516
516
  if not self.grid.k_direction_is_down:
517
517
  self.__cell_ijk_lefthanded = not self.__cell_ijk_lefthanded
518
518
  if self.__crs.is_right_handed_xyz():