resqpy 4.5.0__py3-none-any.whl → 4.6.3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
resqpy/olio/write_hdf5.py CHANGED
@@ -15,7 +15,7 @@ import numpy as np
  import resqpy.olio.uuid as bu

  resqml_path_head = '/RESQML/' # note: latest fesapi code uses RESQML20
- write_bool_as_int8 = True # Nexus read fails if bool used as hdf5 element dtype; also better for NullValue handling
+ write_bool_as_uint8 = True # Nexus read fails if bool used as hdf5 element dtype
  write_int_as_int32 = True # only applies if registered dtype is None


@@ -37,7 +37,8 @@ class H5Register():
  group_tail (string): the remainder of the hdf5 internal path (following RESQML and
  uuid elements)
  a (numpy array): the dataset (array) to be registered for writing
- dtype (type or string): the required type of the individual elements within the dataset
+ dtype (type or string): the required type of the individual elements within the dataset;
+ special value of 'pack' may be used to cause a bool array to be packed before writing
  hdf5_internal_path (string, optional): if present, a full hdf5 internal path to use
  instead of the default generated from the uuid
  copy (boolean, default False): if True, a copy of the array will be made at the time of
@@ -50,14 +51,21 @@ class H5Register():
  notes:
  several arrays might belong to the same object;
  if a dtype is given and necessitates a conversion of the array data, the behaviour will
- be as if the copy argument is True regardless of its setting
+ be as if the copy argument is True regardless of its setting;
+ the use of 'pack' as dtype will result in hdf5 data that will not generally be readable
+ by non-resqpy applications; when reading packed data, the required shape must be specified;
+ packing only takes place over the last axis; do not use packing if the array needs to be
+ read or updated in slices, or read a single value at a time with index values
  """

- # print('registering dataset with uuid ' + str(object_uuid) + ' and group tail ' + group_tail)
+ # log.debug('registering dataset with uuid ' + str(object_uuid) + ' and group tail ' + group_tail)
  assert (len(group_tail) > 0)
  assert a is not None
  assert isinstance(a, np.ndarray)
- if dtype is not None:
+ if str(dtype) == 'pack':
+ a = np.packbits(a, axis = -1) # todo: check this returns uint8 array
+ dtype = 'uint8'
+ elif dtype is not None:
  a = a.astype(dtype, copy = copy)
  elif copy:
  a = a.copy()
@@ -66,7 +74,7 @@ class H5Register():
  if group_tail[-1] == '/':
  group_tail = group_tail[:-1]
  if (object_uuid, group_tail) in self.dataset_dict.keys():
- pass # todo: warn of re-registration?
+ log.warning(f'multiple hdf5 registrations for uuid: {object_uuid}; group: {group_tail}')
  self.dataset_dict[(object_uuid, group_tail)] = (a, dtype)
  if hdf5_internal_path:
  self.hdf5_path_dict[(object_uuid, group_tail)] = hdf5_internal_path
@@ -99,8 +107,8 @@ class H5Register():
  dtype = a.dtype
  if use_int32 and str(dtype) == 'int64':
  dtype = 'int32'
- if write_bool_as_int8 and str(dtype).lower().startswith('bool'):
- dtype = 'int8'
+ if write_bool_as_uint8 and str(dtype).lower().startswith('bool'):
+ dtype = 'uint8'
  # log.debug('Writing hdf5 dataset ' + internal_path + ' of size ' + str(a.size) + ' type ' + str(dtype))
  fp.create_dataset(internal_path, data = a, dtype = dtype)
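
The 'pack' dtype introduced above relies on numpy's bit packing, which pads the last axis up to a whole number of bytes, so the original last-axis length is lost in the stored data. A minimal sketch of the round trip (not resqpy's actual code) shows why the required shape must be supplied when reading packed data:

```python
import numpy as np

# illustrative round trip for the 'pack' handling sketched above
a = np.zeros((3, 5), dtype = bool)
a[1, 2] = a[2, 4] = True

packed = np.packbits(a, axis = -1)   # shape (3, 1): last axis padded to a whole number of bytes
assert packed.dtype == np.uint8

# unpackbits only knows the padded length (8), not the original length (5),
# which is why readers of packed data must be told the required shape
unpacked = np.unpackbits(packed, axis = -1)[..., :a.shape[-1]].astype(bool)
assert np.array_equal(a, unpacked)
```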
resqpy/olio/xml_et.py CHANGED
@@ -301,7 +301,7 @@ def cut_extra_metadata(root):
  """Removes all the extra metadata children under root node."""

  for child in root:
- if child.tag == 'ExtraMetadata':
+ if match(child.tag, 'ExtraMetadata'):
  root.remove(child)


@@ -413,7 +413,7 @@ def print_xml_tree(root,
  log_level = log_level,
  max_lines = max_lines,
  line_count = line_count)
- if line_count > max_lines:
+ if max_lines and line_count > max_lines:
  break
  return line_count

@@ -730,7 +730,7 @@ def write_xml_node(xml_fp, root, level = 0, namespace_keys = []):
  else:
  line += '>'
  text = root.text
- if (not text or text.isspace()) and tag == 'Title':
+ if (not text or text.isspace()) and tag.endswith('Title'):
  text = 'untitled'
  if text and not text.isspace():
  line += text.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
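
For context on the tag comparisons above: ElementTree reports element tags with the namespace URI embedded, e.g. `{uri}Title`, so a plain equality test against a bare tag name can silently fail; the switch to `match()` and `endswith('Title')` is presumably to tolerate that prefix. A minimal standalone illustration (not resqpy code):

```python
import xml.etree.ElementTree as ET

xml = ('<root xmlns:eml="http://www.energistics.org/energyml/data/commonv2">'
       '<eml:Title>well A</eml:Title></root>')
child = ET.fromstring(xml)[0]

print(child.tag)                    # '{http://www.energistics.org/energyml/data/commonv2}Title'
print(child.tag == 'Title')         # False: plain equality misses the namespace prefix
print(child.tag.endswith('Title'))  # True: tolerant of the '{namespace}' prefix
```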
@@ -525,17 +525,15 @@ def _get_indexable_element(indexable_element, support_type):
  return indexable_element


- def _cached_part_array_ref_get_array(collection, part, dtype, model, cached_array_name):
+ def _cached_part_array_ref_get_array(collection, part, dtype, model, cached_array_name, use_pack):
  const_value = collection.constant_value_for_part(part)
  if const_value is None:
- _cached_part_array_ref_const_none(collection, part, dtype, model, cached_array_name)
+ _cached_part_array_ref_const_none(collection, part, dtype, model, cached_array_name, use_pack)
  else:
  _cached_part_array_ref_const_notnone(collection, part, const_value, cached_array_name)
- if not hasattr(collection, cached_array_name):
- return None


- def _cached_part_array_ref_const_none(collection, part, dtype, model, cached_array_name):
+ def _cached_part_array_ref_const_none(collection, part, dtype, model, cached_array_name, use_pack):
  part_node = collection.node_for_part(part)
  if part_node is None:
  return None
@@ -544,6 +542,13 @@ def _cached_part_array_ref_const_none(collection, part, dtype, model, cached_arr
  else:
  first_values_node, tag, dtype = _cached_part_array_ref_get_node_values(part_node, dtype)

+ # the required shape is required if a bool array may need to be unpacked from bits
+ required_shape = None
+ str_dtype = str(dtype)
+ if use_pack and ('bool' in str_dtype or 'int8' in str_dtype):
+ required_shape = collection.supporting_shape(indexable_element = collection.indexable_for_part(part),
+ direction = _part_direction(collection, part))
+
  h5_key_pair = model.h5_uuid_and_path_for_node(first_values_node, tag = tag)
  if h5_key_pair is None:
  return None
@@ -552,6 +557,7 @@
  cache_array = True,
  object = collection,
  array_attribute = cached_array_name,
+ required_shape = required_shape,
  dtype = dtype)
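
On the read side, the shape obtained from the supporting representation is what allows a bit-packed uint8 dataset to be expanded back to booleans. A minimal sketch of the idea, using a hypothetical helper (resqpy's actual unpacking lives in its hdf5 array access code, driven by the required_shape argument above):

```python
import numpy as np

def unpack_to_shape(packed: np.ndarray, required_shape: tuple) -> np.ndarray:
    """Hypothetical helper: expand a bit-packed uint8 array back to a bool array of required_shape."""
    # unpack along the last axis, then trim the padding bits added by np.packbits
    bits = np.unpackbits(packed, axis = -1)
    return bits[..., :required_shape[-1]].astype(bool).reshape(required_shape)

# example: a (2, 3, 10) bool array packs to (2, 3, 2) uint8; only the supporting shape tells us 10
original = np.random.rand(2, 3, 10) > 0.5
packed = np.packbits(original, axis = -1)
assert np.array_equal(unpack_to_shape(packed, original.shape), original)
```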
@@ -124,6 +124,7 @@ class Property(BaseResqpy):
  find_local_property_kind = True,
  expand_const_arrays = False,
  dtype = None,
+ use_pack = False,
  extra_metadata = {}):
  """Populates a new Property from a numpy array and metadata; NB. Writes data to hdf5 and adds part to model.

@@ -168,6 +169,8 @@ class Property(BaseResqpy):
  expand_const_arrays (boolean, default False): if True, and a const_value is given, the array will be fully
  expanded and written to the hdf5 file; the xml will then not indicate that it is constant
  dtype (numpy dtype, optional): if present, the elemental data type to use when writing the array to hdf5
+ use_pack (bool, default False): if True, a bool array will be packed along its last axis; this
+ will generally result in hdf5 data that is not readable by non-resqpy applications
  extra_metadata (optional): if present, a dictionary of extra metadata to be added for the part

  returns:
@@ -213,7 +216,7 @@ class Property(BaseResqpy):
  count = count,
  points = points,
  const_value = const_value)
- prop.write_hdf5(expand_const_arrays = expand_const_arrays, dtype = dtype)
+ prop.write_hdf5(expand_const_arrays = expand_const_arrays, dtype = dtype, use_pack = use_pack)
  prop.create_xml(support_uuid = support_uuid,
  time_series_uuid = time_series_uuid,
  string_lookup_uuid = string_lookup_uuid,
@@ -223,7 +226,7 @@ class Property(BaseResqpy):
  extra_metadata = extra_metadata)
  return prop

- def array_ref(self, dtype = None, masked = False, exclude_null = False):
+ def array_ref(self, dtype = None, masked = False, exclude_null = False, use_pack = True):
  """Returns a (cached) numpy array containing the property values.

  arguments:
@@ -232,6 +235,8 @@ class Property(BaseResqpy):
  the inactive cell mask in the case of a Grid property
  exclude_null (boolean, default False): if True and masked is True, elements whose value is the null value
  (NaN for floats) will be masked out
+ use_pack (boolean, default True): if True, and the property is a boolean array, the hdf5 data will
+ be unpacked if its shape indicates that it has been packed into bits for storage

  returns:
  numpy array
@@ -242,7 +247,8 @@ class Property(BaseResqpy):
  return self.collection.cached_part_array_ref(self.part,
  dtype = dtype,
  masked = masked,
- exclude_null = exclude_null)
+ exclude_null = exclude_null,
+ use_pack = use_pack)

  def is_continuous(self):
  """Returns boolean indicating that the property contains continuous (ie. float) data.
@@ -383,7 +389,7 @@ class Property(BaseResqpy):
  const_value = const_value,
  points = points)

- def write_hdf5(self, file_name = None, mode = 'a', expand_const_arrays = False, dtype = None):
+ def write_hdf5(self, file_name = None, mode = 'a', expand_const_arrays = False, dtype = None, use_pack = False):
  """Writes the array data to the hdf5 file; not usually called directly.

  arguments:
@@ -393,6 +399,8 @@ class Property(BaseResqpy):
  expand_const_arrays (bool, default False): if True and the array is a constant array then a fully populated
  array is generated and stored (otherwise the constant value is held in xml and no hdf5 data is needed)
  dtype (numpy dtype, optional): if present, the elemental data type to use when writing the array to hdf5
+ use_pack (bool, default False): if True, a bool array will be packed along its last axis; this
+ will generally result in hdf5 data that is not readable by non-resqpy applications

  notes:
  see the documentation for the convenience method from_array()
@@ -400,10 +408,13 @@ class Property(BaseResqpy):
  if not self.collection.imported_list:
  log.warning('no imported Property array to write to hdf5')
  return
+ if str(dtype) == 'pack':
+ use_pack = True
  self.collection.write_hdf5_for_imported_list(file_name = file_name,
  mode = mode,
  expand_const_arrays = expand_const_arrays,
- dtype = dtype)
+ dtype = dtype,
+ use_pack = use_pack)

  def create_xml(self,
  ext_uuid = None,
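
Taken together, these changes let a boolean property be written bit-packed and read back transparently. An illustrative sketch only: `model` and `grid` are assumed to be an existing resqpy Model and grid object, and keyword argument names other than dtype/use_pack follow the usual from_array pattern and should be checked against the signature being extended above:

```python
import numpy as np
import resqpy.property as rqp

# a bool array matching the grid's cell extent, for illustration
mask = np.zeros(grid.extent_kji, dtype = bool)
mask[0] = True

prop = rqp.Property.from_array(model,
                               mask,
                               source_info = 'example',
                               keyword = 'ACTIVE MASK',
                               support_uuid = grid.uuid,
                               property_kind = 'active',
                               indexable_element = 'cells',
                               discrete = True,
                               use_pack = True)  # bool data is bit packed when written to hdf5

# array_ref() defaults to use_pack = True, so the packed hdf5 data is expanded back to booleans
assert prop.array_ref().dtype == bool
```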
@@ -58,7 +58,8 @@ class PropertyCollection():
  """

  assert property_set_root is None or support is not None, \
- 'support (grid, wellbore frame, blocked well, mesh, or grid connection set) must be specified when populating property collection from property set'
+ 'support (grid, wellbore frame, blocked well, mesh, or grid connection set) must be specified ' + \
+ 'when populating property collection from property set'

  self.dict = {} # main dictionary of model property parts which are members of the collection
  # above is mapping from part_name to:
@@ -802,7 +803,8 @@ class PropertyCollection():
  multiple_handling = 'exception',
  title = None,
  title_mode = None,
- related_uuid = None):
+ related_uuid = None,
+ use_pack = True):
  """Returns the array of data for a single part selected by those arguments which are not None.

  arguments:
@@ -814,6 +816,8 @@ class PropertyCollection():
  will also be masked out
  multiple_handling (string, default 'exception'): one of 'exception', 'none', 'first', 'oldest', 'newest'
  title (string, optional): synonym for citation_title argument
+ use_pack (boolean, default True): if True, and the property is a boolean array, the hdf5 data will
+ be unpacked if its shape indicates that it has been packed into bits

  Other optional arguments:
  realization, support_uuid, continuous, points, count, indexable, property_kind, facet_type, facet,
@@ -856,7 +860,11 @@ class PropertyCollection():
  related_uuid = related_uuid)
  if part is None:
  return None
- return self.cached_part_array_ref(part, dtype = dtype, masked = masked, exclude_null = exclude_null)
+ return self.cached_part_array_ref(part,
+ dtype = dtype,
+ masked = masked,
+ exclude_null = exclude_null,
+ use_pack = use_pack)

  def number_of_parts(self):
  """Returns the number of parts (properties) in this collection.
@@ -1722,7 +1730,7 @@ class PropertyCollection():
  return None # could treat as fatal error
  return model.h5_uuid_and_path_for_node(first_values_node, tag = tag)

- def cached_part_array_ref(self, part, dtype = None, masked = False, exclude_null = False):
+ def cached_part_array_ref(self, part, dtype = None, masked = False, exclude_null = False, use_pack = True):
  """Returns a numpy array containing the data for the property part; the array is cached in this collection.

  arguments:
@@ -1733,6 +1741,8 @@ class PropertyCollection():
  the mask is set to the inactive array attribute of the support object if present
  exclude_null (boolean, default False): if True, and masked is also True, then elements of the array
  holding the null value will also be masked out
+ use_pack (boolean, default True): if True, and the property is a boolean array, the hdf5 data will
+ be unpacked if its shape indicates that it has been packed into bits for storage

  returns:
  reference to a cached numpy array containing the actual property data; multiple calls will return
@@ -1746,7 +1756,8 @@ class PropertyCollection():
  if the indexable element is set to the typical value for the class of supporting representation, eg.
  'cells' for grid objects); if exclude_null is set True then null value elements will also be masked out
  (as long as masked is True); however, it is recommended simply to use np.NaN values in floating point
- property arrays if the commonality is not needed
+ property arrays if the commonality is not needed;
+ set use_pack True if the hdf5 data may have been written with a similar setting

  :meta common:
  """
@@ -1757,7 +1768,7 @@ class PropertyCollection():
  return None

  if not hasattr(self, cached_array_name):
- pcga._cached_part_array_ref_get_array(self, part, dtype, model, cached_array_name)
+ pcga._cached_part_array_ref_get_array(self, part, dtype, model, cached_array_name, use_pack)

  if masked:
  exclude_value = self.null_value_for_part(part) if exclude_null else None
@@ -1856,13 +1867,18 @@ class PropertyCollection():
  assert len(patch_list) == 1 # todo: handle more than one patch of values
  return model.h5_uuid_and_path_for_node(rqet.find_tag(patch_list[0], 'Values'))

- def facets_array_ref(self, use_32_bit = False, indexable_element = None): # todo: add masked argument
+ def facets_array_ref(self,
+ use_32_bit = False,
+ indexable_element = None,
+ use_pack = True): # todo: add masked argument
  """Returns a +1D array of all parts with first axis being over facet values; Use facet_list() for lookup.

  arguments:
  use_32_bit (boolean, default False): if True, the resulting numpy array will use a 32 bit dtype; if False, 64 bit
  indexable_element (string, optional): the indexable element for the properties in the collection; if None, will
  be determined from the data
+ use_pack (boolean, default True): if True, and the property is a boolean array, the hdf5 data will
+ be unpacked if its shape indicates that it has been packed into bits

  returns:
  numpy array containing all the data in the collection, the first axis being over facet values and the rest of
@@ -1897,7 +1913,7 @@ class PropertyCollection():

  for part in self.parts():
  facet_index = facet_list.index(self.facet_for_part(part))
- pa = self.cached_part_array_ref(part, dtype = dtype)
+ pa = self.cached_part_array_ref(part, dtype = dtype, use_pack = use_pack)
  a[facet_index] = pa
  self.uncache_part_array(part)

@@ -2278,7 +2294,8 @@ class PropertyCollection():
  mode = 'a',
  expand_const_arrays = False,
  dtype = None,
- use_int32 = None):
+ use_int32 = None,
+ use_pack = False):
  """Create or append to an hdf5 file, writing datasets for the imported arrays.

  arguments:
@@ -2293,6 +2310,8 @@ class PropertyCollection():
  use_int32 (bool, optional): if dtype is None, this controls whether 64 bit int arrays are written
  as 32 bit; if None, the system default is to write as 32 bit; if True, 32 bit is used; if
  False, 64 bit data is written; ignored if dtype is not None
+ use_pack (bool, default False): if True, bool arrays will be packed along their last axis; this
+ will generally result in hdf5 data that is not readable by non-resqpy applications

  :meta common:
  """
@@ -2315,10 +2334,13 @@ class PropertyCollection():
  uuid = entry[0]
  cached_name = entry[3]
  tail = 'points_patch0' if entry[18] else 'values_patch0'
+ if use_pack and (str(dtype).startswith('bool') or
+ (dtype is None and str(self.__dict__[cached_name].dtype) == 'bool')):
+ dtype = 'pack'
  h5_reg.register_dataset(uuid, tail, self.__dict__[cached_name], dtype = dtype)
  h5_reg.write(file = file_name, mode = mode, use_int32 = use_int32)

- def write_hdf5_for_part(self, part, file_name = None, mode = 'a'):
+ def write_hdf5_for_part(self, part, file_name = None, mode = 'a', use_pack = False):
  """Create or append to an hdf5 file, writing dataset for the specified part."""

  if self.constant_value_for_part(part) is not None:
@@ -2326,7 +2348,10 @@ class PropertyCollection():
  h5_reg = rwh5.H5Register(self.model)
  a = self.cached_part_array_ref(part)
  tail = 'points_patch0' if self.points_for_part(part) else 'values_patch0'
- h5_reg.register_dataset(self.uuid_for_part(part), tail, a)
+ dtype = None
+ if use_pack and 'bool' in str(a.dtype):
+ dtype = 'pack'
+ h5_reg.register_dataset(self.uuid_for_part(part), tail, a, dtype = dtype)
  h5_reg.write(file = file_name, mode = mode)

  def create_xml_for_imported_list_and_add_parts_to_model(self,
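
At the collection level the same option appears on both the write and read paths; a brief hedged usage sketch, assuming `collection` is a PropertyCollection already attached to a support and `part` is an existing boolean property part within it:

```python
# store the bool array bit-packed as uint8 bytes in hdf5
collection.write_hdf5_for_part(part, use_pack = True)

# cached_part_array_ref / single_array_ref default to use_pack = True, so a packed array is
# expanded back to booleans using the shape of the supporting representation
a = collection.cached_part_array_ref(part)
```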
@@ -2,7 +2,7 @@

  __all__ = [
  'BaseSurface', 'CombinedSurface', 'Mesh', 'TriangulatedPatch', 'PointSet', 'Surface', 'TriMesh',
- 'distill_triangle_points'
+ 'distill_triangle_points', '_adjust_flange_z'
  ]

  from ._base_surface import BaseSurface
@@ -10,7 +10,7 @@ from ._combined_surface import CombinedSurface
  from ._mesh import Mesh
  from ._triangulated_patch import TriangulatedPatch
  from ._pointset import PointSet
- from ._surface import Surface, distill_triangle_points
+ from ._surface import Surface, distill_triangle_points, _adjust_flange_z
  from ._tri_mesh import TriMesh

  # Set "module" attribute of all public objects to this path.
@@ -10,6 +10,7 @@ log = logging.getLogger(__name__)
  import numpy as np

  import resqpy.crs as rqc
+ import resqpy.lines as rql
  import resqpy.olio.intersection as meet
  import resqpy.olio.triangulation as triangulate
  import resqpy.olio.uuid as bu
@@ -394,6 +395,8 @@ class Surface(rqsb.BaseSurface):
  flange_point_count = 11,
  flange_radial_factor = 10.0,
  flange_radial_distance = None,
+ flange_inner_ring = False,
+ saucer_parameter = None,
  make_clockwise = False):
  """Populate this (empty) Surface object with a Delaunay triangulation of points in a PointSet object.

@@ -414,6 +417,13 @@ class Surface(rqsb.BaseSurface):
  factor of the maximum radial distance of the points themselves; ignored if extend_with_flange is False
  flange_radial_distance (float, optional): if present, the minimum absolute distance of flange points from
  centre of points; units are those of the crs
+ flange_inner_ring (bool, default False): if True, an inner ring of points, with double flange point counr,
+ is created at a radius just outside that of the furthest flung original point; this improves
+ triangulation of the extended point set when the original has a non-convex hull
+ saucer_parameter (float, optional): if present, and extend_with_flange is True, then the fractional
+ distance from the centre of the points to its rim at which to sample the surface for extrapolation
+ and thereby modify the recumbent z of flange points; 0 will usually give shallower and smoother saucer;
+ larger values (must be less than one) will lead to stronger and more erratic saucer shape in flange
  make_clockwise (bool, default False): if True, the returned triangles will all be clockwise when
  viewed in the direction -ve to +ve z axis; if reorient is also True, the clockwise aspect is
  enforced in the reoriented space
@@ -430,6 +440,7 @@ class Surface(rqsb.BaseSurface):
  and radial distance arguments
  """

+ assert saucer_parameter is None or 0.0 <= saucer_parameter < 1.0
  crs = rqc.Crs(self.model, uuid = point_set.crs_uuid)
  p = point_set.full_array_ref()
  if crs.xy_units == crs.z_units or not reorient:
@@ -443,10 +454,13 @@ class Surface(rqsb.BaseSurface):
  else:
  p_xy = unit_adjusted_p
  if extend_with_flange:
+ if not reorient:
+ log.warning('extending point set with flange without reorientation')
  flange_points = triangulate.surrounding_xy_ring(p_xy,
  count = flange_point_count,
  radial_factor = flange_radial_factor,
- radial_distance = flange_radial_distance)
+ radial_distance = flange_radial_distance,
+ inner_ring = flange_inner_ring)
  p_xy_e = np.concatenate((p_xy, flange_points), axis = 0)
  if reorient:
  # reorient back extenstion points into original p space
@@ -457,6 +471,7 @@ class Surface(rqsb.BaseSurface):
  else:
  p_xy_e = p_xy
  p_e = unit_adjusted_p
+ flange_array = None
  log.debug('number of points going into dt: ' + str(len(p_xy_e)))
  success = False
  try:
@@ -471,15 +486,18 @@ class Surface(rqsb.BaseSurface):
  log.debug('number of triangles: ' + str(len(t)))
  if make_clockwise:
  triangulate.make_all_clockwise_xy(t, p_e) # modifies t in situ
+ if extend_with_flange:
+ flange_array = np.zeros(len(t), dtype = bool)
+ flange_array[:] = np.where(np.any(t >= len(p), axis = 1), True, False)
+ if saucer_parameter is not None:
+ assert reorient, 'flange saucer mode only available with reorientation active'
+ _adjust_flange_z(self.model, self.crs_uuid, p_xy_e, len(p), t, flange_array, saucer_parameter)
+ p_e = vec.rotate_array(reorient_matrix.T, p_xy_e)
  if crs.xy_units != crs.z_units and reorient:
  wam.convert_lengths(p_e[:, 2], crs.xy_units, crs.z_units)
  self.crs_uuid = point_set.crs_uuid
  self.set_from_triangles_and_points(t, p_e)
- if extend_with_flange:
- flange_array = np.zeros(len(t), dtype = bool)
- flange_array[:] = np.where(np.any(t >= len(p), axis = 1), True, False)
- return flange_array
- return None
+ return flange_array

  def make_all_clockwise_xy(self, reorient = False):
  """Reorders cached triangles data such that all triangles are clockwise when viewed from -ve z axis.
@@ -1074,3 +1092,48 @@ def distill_triangle_points(t, p):
  assert np.all(triangles_mapped < len(points_distilled))

  return triangles_mapped, points_distilled
+
+
+ def _adjust_flange_z(model, crs_uuid, p_xy_e, flange_start_index, t, flange_array, saucer_parameter):
+ """Adjust the flange point z values (in recumbent space) by extrapolation of pair of points on original."""
+
+ # reconstruct the hull (could be concave) of original points
+ all_edges, edge_use_count = triangulate.edges(t)
+ inner_edges = triangulate.internal_edges(all_edges, edge_use_count)
+ t_for_inner_edges = triangulate.triangles_using_edges(t, inner_edges)
+ assert np.all(t_for_inner_edges >= 0)
+ flange_pairs = flange_array[t_for_inner_edges]
+ rim_edges = inner_edges[np.where(flange_pairs[:, 0] != flange_pairs[:, 1])]
+ assert rim_edges.ndim == 2 and rim_edges.shape[1] == 2 and len(rim_edges) > 0
+ rim_edge_index_list, rim_point_index_list = triangulate.rims(rim_edges)
+ assert len(rim_edge_index_list) == 1 and len(rim_point_index_list) == 1
+ rim_edge_indices = rim_edge_index_list[0]
+ rim_point_indices = rim_point_index_list[0] # ordered list of points on original hull (could be concave)
+ rim_pl = rql.Polyline(model,
+ set_coord = p_xy_e[rim_point_indices],
+ set_crs = crs_uuid,
+ is_closed = True,
+ title = 'rim')
+
+ centre = np.mean(p_xy_e[:flange_start_index], axis = 0)
+ # for each flange point, intersect a line from centre with the rim, and sample surface at saucer parameter
+ for flange_pi in range(flange_start_index, len(p_xy_e)):
+ f_xyz = p_xy_e[flange_pi]
+ pl_seg, rim_x, rim_y = rim_pl.first_line_intersection(centre[0], centre[1], f_xyz[0], f_xyz[1])
+ assert pl_seg is not None
+ rim_xyz = rim_pl.segment_xyz_from_xy(pl_seg, rim_x, rim_y)
+ sample_p = (1.0 - saucer_parameter) * centre + saucer_parameter * rim_xyz
+ p_list = vec.points_in_triangles_njit(np.expand_dims(sample_p, axis = 0), p_xy_e[t], 1)
+ vertical = np.array((0.0, 0.0, 1.0), dtype = float)
+ assert len(p_list) > 0
+ triangle_index, p_index, _ = p_list[0]
+ start_xyz = meet.line_triangle_intersect_numba(sample_p, vertical, p_xy_e[t[triangle_index]], t_tol = 0.05)
+ v_to_rim = rim_xyz - start_xyz
+ v_to_flange_p = f_xyz - start_xyz
+ if abs(v_to_rim[0]) > abs(v_to_rim[1]):
+ f = (v_to_rim[0]) / (v_to_flange_p[0])
+ else:
+ f = (v_to_rim[1]) / (v_to_flange_p[1])
+ assert 0.0 < f < 1.0
+ z = (rim_xyz[2] - start_xyz[2]) / f + start_xyz[2]
+ p_xy_e[flange_pi, 2] = z
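
The final block of _adjust_flange_z is a straight linear extrapolation along the centre-to-flange direction: with s the surface point sampled at the saucer parameter, r the rim intersection and p the flange point, the code computes the plan-view fraction f = (r_x - s_x) / (p_x - s_x) (or the y components, whichever rim offset is larger in magnitude) at which the rim sits between s and p, and then continues the same gradient out to the flange radius:

    z_p = s_z + (r_z - s_z) / f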
@@ -3,13 +3,14 @@
  __all__ = [
  'TimeSeries', 'GeologicTimeSeries', 'AnyTimeSeries', 'TimeDuration', 'selected_time_series', 'simplified_timestamp',
  'cleaned_timestamp', 'time_series_from_list', 'merge_timeseries_from_uuid', 'geologic_time_str',
- 'timeframe_for_time_series_uuid', 'any_time_series', 'time_series_from_nexus_summary', 'check_timestamp'
+ 'timeframe_for_time_series_uuid', 'any_time_series', 'time_series_from_nexus_summary', 'check_timestamp',
+ 'colloquial_date'
  ]

  from ._any_time_series import AnyTimeSeries
  from ._geologic_time_series import GeologicTimeSeries
  from ._time_duration import TimeDuration
- from ._time_series import TimeSeries, check_timestamp
+ from ._time_series import TimeSeries, check_timestamp, colloquial_date
  from ._functions import selected_time_series, simplified_timestamp, cleaned_timestamp, time_series_from_list, \
  merge_timeseries_from_uuid, geologic_time_str, timeframe_for_time_series_uuid, any_time_series
  from ._from_nexus_summary import time_series_from_nexus_summary
@@ -233,3 +233,13 @@ def check_timestamp(timestamp):
  if timestamp.endswith('Z'):
  timestamp = timestamp[:-1]
  _ = dt.datetime.fromisoformat(timestamp)
+
+
+ def colloquial_date(timestamp, usa_date_format = False):
+ """Returns date string in format DD/MM/YYYY (or MM/DD/YYYY if usa_date_format is True)."""
+ if timestamp.endswith('Z'):
+ timestamp = timestamp[:-1]
+ date_obj = dt.datetime.fromisoformat(timestamp)
+ if usa_date_format:
+ return f'{date_obj.month:02}/{date_obj.day:02}/{date_obj.year:4}'
+ return f'{date_obj.day:02}/{date_obj.month:02}/{date_obj.year:4}'
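
The new helper is a thin wrapper over datetime.fromisoformat, and the __init__ change above makes it importable from the package level; for example:

```python
from resqpy.time_series import colloquial_date

colloquial_date('2023-07-30T00:00:00Z')                          # '30/07/2023'
colloquial_date('2023-07-30T00:00:00Z', usa_date_format = True)  # '07/30/2023'
```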
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: resqpy
- Version: 4.5.0
+ Version: 4.6.3
  Summary: Python API for working with RESQML models
  Home-page: https://github.com/bp/resqpy
  License: MIT
@@ -25,7 +25,7 @@ Requires-Dist: lasio (>=0.30,<0.31)
  Requires-Dist: lxml (>=4.9,<5.0)
  Requires-Dist: numba (>=0.56,<1.0)
  Requires-Dist: numpy (>=1.23,<2.0)
- Requires-Dist: pandas (>=1.5,<2.0)
+ Requires-Dist: pandas (>=1.4,<2.0)
  Requires-Dist: scipy (>=1.9,<2.0)
  Project-URL: Documentation, https://resqpy.readthedocs.io/en/latest/
  Project-URL: Repository, https://github.com/bp/resqpy
@@ -52,7 +52,7 @@ The package is written and maintained by bp, and is made available under the MIT
  license as a contribution to the open-source community.

  **resqpy** was created by Andy Beer. For enquires about resqpy, please contact
- Nathan Lane (Nathan.Lane@bp.com)
+ Emma Nesbit (Emma.Nesbit@uk.bp.com)

  ### Documentation