polytope-python 1.0.6__tar.gz → 1.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {polytope-python-1.0.6/polytope_python.egg-info → polytope-python-1.0.8}/PKG-INFO +1 -1
  2. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/backends/datacube.py +6 -3
  3. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/backends/fdb.py +8 -10
  4. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/backends/mock.py +3 -1
  5. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/backends/xarray.py +5 -3
  6. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/tensor_index_tree.py +1 -0
  7. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/datacube_mappers.py +10 -2
  8. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/healpix.py +10 -1
  9. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py +10 -1
  10. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/local_regular.py +22 -3
  11. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/octahedral.py +12 -1
  12. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py +10 -1
  13. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/regular.py +17 -3
  14. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_merger/datacube_merger.py +0 -1
  15. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_transformations.py +1 -2
  16. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_type_change/datacube_type_change.py +1 -1
  17. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/engine/hullslicer.py +18 -33
  18. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/options.py +1 -1
  19. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/polytope.py +8 -2
  20. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/exceptions.py +8 -4
  21. polytope-python-1.0.8/polytope_feature/version.py +1 -0
  22. {polytope-python-1.0.6 → polytope-python-1.0.8/polytope_python.egg-info}/PKG-INFO +1 -1
  23. polytope-python-1.0.8/polytope_python.egg-info/SOURCES.txt +54 -0
  24. polytope-python-1.0.8/polytope_python.egg-info/top_level.txt +1 -0
  25. {polytope-python-1.0.6 → polytope-python-1.0.8}/setup.py +1 -1
  26. polytope-python-1.0.6/polytope/version.py +0 -1
  27. polytope-python-1.0.6/polytope_python.egg-info/SOURCES.txt +0 -54
  28. polytope-python-1.0.6/polytope_python.egg-info/top_level.txt +0 -1
  29. {polytope-python-1.0.6 → polytope-python-1.0.8}/LICENSE +0 -0
  30. {polytope-python-1.0.6 → polytope-python-1.0.8}/MANIFEST.in +0 -0
  31. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/__init__.py +0 -0
  32. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/__init__.py +0 -0
  33. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/backends/__init__.py +0 -0
  34. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/datacube_axis.py +0 -0
  35. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/index_tree_pb2.py +0 -0
  36. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/__init__.py +0 -0
  37. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_cyclic/__init__.py +0 -0
  38. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_cyclic/datacube_cyclic.py +0 -0
  39. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/__init__.py +0 -0
  40. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_mappers/mapper_types/__init__.py +0 -0
  41. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_merger/__init__.py +0 -0
  42. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_reverse/__init__.py +0 -0
  43. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_reverse/datacube_reverse.py +0 -0
  44. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/transformations/datacube_type_change/__init__.py +0 -0
  45. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/datacube/tree_encoding.py +0 -0
  46. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/engine/__init__.py +0 -0
  47. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/engine/engine.py +0 -0
  48. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/shapes.py +0 -0
  49. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/__init__.py +0 -0
  50. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/combinatorics.py +0 -0
  51. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/geometry.py +0 -0
  52. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/list_tools.py +0 -0
  53. {polytope-python-1.0.6/polytope → polytope-python-1.0.8/polytope_feature}/utility/profiling.py +0 -0
  54. {polytope-python-1.0.6 → polytope-python-1.0.8}/polytope_python.egg-info/dependency_links.txt +0 -0
  55. {polytope-python-1.0.6 → polytope-python-1.0.8}/polytope_python.egg-info/not-zip-safe +0 -0
  56. {polytope-python-1.0.6 → polytope-python-1.0.8}/polytope_python.egg-info/requires.txt +0 -0
  57. {polytope-python-1.0.6 → polytope-python-1.0.8}/pyproject.toml +0 -0
  58. {polytope-python-1.0.6 → polytope-python-1.0.8}/requirements.txt +0 -0
  59. {polytope-python-1.0.6 → polytope-python-1.0.8}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 1.0
2
2
  Name: polytope-python
3
- Version: 1.0.6
3
+ Version: 1.0.8
4
4
  Summary: Polytope datacube feature extraction library
5
5
  Home-page: https://github.com/ecmwf/polytope
6
6
  Author: ECMWF
@@ -1,6 +1,6 @@
1
1
  import logging
2
2
  from abc import ABC, abstractmethod
3
- from typing import Any
3
+ from typing import Any, Dict
4
4
 
5
5
  from ...utility.combinatorics import validate_axes
6
6
  from ..datacube_axis import DatacubeAxis
@@ -31,9 +31,10 @@ class Datacube(ABC):
31
31
  self.merged_axes = []
32
32
  self.unwanted_path = {}
33
33
  self.compressed_axes = compressed_axes_options
34
+ self.grid_md5_hash = None
34
35
 
35
36
  @abstractmethod
36
- def get(self, requests: TensorIndexTree) -> Any:
37
+ def get(self, requests: TensorIndexTree, context: Dict) -> Any:
37
38
  """Return data given a set of request trees"""
38
39
 
39
40
  @property
@@ -69,6 +70,7 @@ class Datacube(ABC):
69
70
  # TODO: do we use this?? This shouldn't work for a disk in lat/lon on a octahedral or other grid??
70
71
  for compressed_grid_axis in transformation.compressed_grid_axes:
71
72
  self.compressed_grid_axes.append(compressed_grid_axis)
73
+ self.grid_md5_hash = transformation.md5_hash
72
74
  if len(final_axis_names) > 1:
73
75
  self.coupled_axes.append(final_axis_names)
74
76
  for axis in final_axis_names:
@@ -126,9 +128,10 @@ class Datacube(ABC):
126
128
  """
127
129
  path = self.fit_path(path)
128
130
  indexes = axis.find_indexes(path, self)
131
+
129
132
  idx_between = axis.find_indices_between(indexes, lower, upper, self, method)
130
133
 
131
- logging.info(f"For axis {axis.name} between {lower} and {upper}, found indices {idx_between}")
134
+ logging.debug(f"For axis {axis.name} between {lower} and {upper}, found indices {idx_between}")
132
135
 
133
136
  return idx_between
134
137
 
@@ -77,7 +77,9 @@ class FDBDatacube(Datacube):
77
77
  for axis_name in axes_to_remove:
78
78
  self._axes.pop(axis_name, None)
79
79
 
80
- def get(self, requests: TensorIndexTree):
80
+ def get(self, requests: TensorIndexTree, context=None):
81
+ if context is None:
82
+ context = {}
81
83
  requests.pprint()
82
84
  if len(requests.children) == 0:
83
85
  return requests
@@ -104,11 +106,11 @@ class FDBDatacube(Datacube):
104
106
  uncompressed_request = {}
105
107
  for i, key in enumerate(compressed_request[0].keys()):
106
108
  uncompressed_request[key] = combi[i]
107
- complete_uncompressed_request = (uncompressed_request, compressed_request[1])
109
+ complete_uncompressed_request = (uncompressed_request, compressed_request[1], self.grid_md5_hash)
108
110
  complete_list_complete_uncompressed_requests.append(complete_uncompressed_request)
109
111
  complete_fdb_decoding_info.append(fdb_requests_decoding_info[j])
110
112
  logging.debug("The requests we give GribJump are: %s", complete_list_complete_uncompressed_requests)
111
- output_values = self.gj.extract(complete_list_complete_uncompressed_requests)
113
+ output_values = self.gj.extract(complete_list_complete_uncompressed_requests, context)
112
114
  logging.debug("GribJump outputs: %s", output_values)
113
115
  self.assign_fdb_output_to_nodes(output_values, complete_fdb_decoding_info)
114
116
 
@@ -124,7 +126,7 @@ class FDBDatacube(Datacube):
124
126
 
125
127
  # First when request node is root, go to its children
126
128
  if requests.axis.name == "root":
127
- logging.info("Looking for data for the tree: %s", [leaf.flatten() for leaf in requests.leaves])
129
+ logging.debug("Looking for data for the tree: %s", [leaf.flatten() for leaf in requests.leaves])
128
130
 
129
131
  for c in requests.children:
130
132
  self.get_fdb_requests(c, fdb_requests, fdb_requests_decoding_info)
@@ -161,8 +163,8 @@ class FDBDatacube(Datacube):
161
163
  new_current_start_idx = []
162
164
  for j, idx in enumerate(sub_lat_idxs):
163
165
  if idx not in seen_indices:
164
- # TODO: need to remove it from the values in the corresponding tree node
165
- # TODO: need to read just the range we give to gj ... DONE?
166
+ # NOTE: need to remove it from the values in the corresponding tree node
167
+ # NOTE: need to read just the range we give to gj
166
168
  original_fdb_node_range_vals.append(actual_fdb_node[0].values[j])
167
169
  seen_indices.add(idx)
168
170
  new_current_start_idx.append(idx)
@@ -187,8 +189,6 @@ class FDBDatacube(Datacube):
187
189
 
188
190
  second_ax = requests.children[0].children[0].axis
189
191
 
190
- # TODO: actually, here we should not remap the nearest_pts, we should instead unmap the
191
- # found_latlon_pts and then remap them later once we have compared found_latlon_pts and nearest_pts
192
192
  nearest_pts = [
193
193
  [lat_val, second_ax._remap_val_to_axis_range(lon_val)]
194
194
  for (lat_val, lon_val) in zip(
@@ -325,8 +325,6 @@ class FDBDatacube(Datacube):
325
325
  request_ranges_with_idx = list(enumerate(interm_request_ranges))
326
326
  sorted_list = sorted(request_ranges_with_idx, key=lambda x: x[1][0])
327
327
  original_indices, sorted_request_ranges = zip(*sorted_list)
328
- logging.debug("We sorted the request ranges into: %s", sorted_request_ranges)
329
- logging.debug("The sorted and unique leaf node ranges are: %s", new_fdb_node_ranges)
330
328
  return (original_indices, sorted_request_ranges, new_fdb_node_ranges)
331
329
 
332
330
  def datacube_natural_indexes(self, axis, subarray):
@@ -24,10 +24,12 @@ class MockDatacube(Datacube):
24
24
  self.stride[k] = stride_cumulative
25
25
  stride_cumulative *= self.dimensions[k]
26
26
 
27
- def get(self, requests: TensorIndexTree):
27
+ def get(self, requests: TensorIndexTree, context=None):
28
28
  # Takes in a datacube and verifies the leaves of the tree are complete
29
29
  # (ie it found values for all datacube axis)
30
30
 
31
+ if context is None:
32
+ context = {}
31
33
  for r in requests.leaves:
32
34
  path = r.flatten()
33
35
  if len(path.items()) == len(self.dimensions.items()):
@@ -50,12 +50,14 @@ class XArrayDatacube(Datacube):
50
50
  val = self._axes[name].type
51
51
  self._check_and_add_axes(options, name, val)
52
52
 
53
- def get(self, requests, leaf_path=None, axis_counter=0):
53
+ def get(self, requests, context=None, leaf_path=None, axis_counter=0):
54
+ if context is None:
55
+ context = {}
54
56
  if leaf_path is None:
55
57
  leaf_path = {}
56
58
  if requests.axis.name == "root":
57
59
  for c in requests.children:
58
- self.get(c, leaf_path, axis_counter + 1)
60
+ self.get(c, context, leaf_path, axis_counter + 1)
59
61
  else:
60
62
  key_value_path = {requests.axis.name: requests.values}
61
63
  ax = requests.axis
@@ -66,7 +68,7 @@ class XArrayDatacube(Datacube):
66
68
  if len(requests.children) != 0:
67
69
  # We are not a leaf and we loop over
68
70
  for c in requests.children:
69
- self.get(c, leaf_path, axis_counter + 1)
71
+ self.get(c, context, leaf_path, axis_counter + 1)
70
72
  else:
71
73
  if self.axis_counter != axis_counter:
72
74
  requests.remove_branch()
@@ -105,6 +105,7 @@ class TensorIndexTree(object):
105
105
  def add_value(self, value):
106
106
  new_values = list(self.values)
107
107
  new_values.append(value)
108
+ new_values.sort()
108
109
  self.values = tuple(new_values)
109
110
 
110
111
  def create_child(self, axis, value, next_nodes):
@@ -15,17 +15,25 @@ class DatacubeMapper(DatacubeAxisTransformation):
15
15
  self.local_area = []
16
16
  if mapper_options.local is not None:
17
17
  self.local_area = mapper_options.local
18
+ self._axis_reversed = None
19
+ if mapper_options.axis_reversed is not None:
20
+ self._axis_reversed = mapper_options.axis_reversed
18
21
  self.old_axis = name
19
22
  self._final_transformation = self.generate_final_transformation()
20
23
  self._final_mapped_axes = self._final_transformation._mapped_axes
21
24
  self._axis_reversed = self._final_transformation._axis_reversed
22
25
  self.compressed_grid_axes = self._final_transformation.compressed_grid_axes
26
+ self.md5_hash = self._final_transformation.md5_hash
23
27
 
24
28
  def generate_final_transformation(self):
25
29
  map_type = _type_to_datacube_mapper_lookup[self.grid_type]
26
- module = import_module("polytope.datacube.transformations.datacube_mappers.mapper_types." + self.grid_type)
30
+ module = import_module(
31
+ "polytope_feature.datacube.transformations.datacube_mappers.mapper_types." + self.grid_type
32
+ )
27
33
  constructor = getattr(module, map_type)
28
- transformation = deepcopy(constructor(self.old_axis, self.grid_axes, self.grid_resolution, self.local_area))
34
+ transformation = deepcopy(
35
+ constructor(self.old_axis, self.grid_axes, self.grid_resolution, self.local_area, self._axis_reversed)
36
+ )
29
37
  return transformation
30
38
 
31
39
  def blocked_axes(self):
@@ -5,7 +5,7 @@ from ..datacube_mappers import DatacubeMapper
5
5
 
6
6
 
7
7
  class HealpixGridMapper(DatacubeMapper):
8
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
8
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
9
9
  # TODO: if local area is not empty list, raise NotImplemented
10
10
  self._mapped_axes = mapped_axes
11
11
  self._base_axis = base_axis
@@ -13,6 +13,11 @@ class HealpixGridMapper(DatacubeMapper):
13
13
  self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
14
14
  self._first_axis_vals = self.first_axis_vals()
15
15
  self.compressed_grid_axes = [self._mapped_axes[1]]
16
+ self.md5_hash = md5_hash.get(resolution, None)
17
+ if self._axis_reversed[mapped_axes[1]]:
18
+ raise NotImplementedError("Healpix grid with second axis in decreasing order is not supported")
19
+ if not self._axis_reversed[mapped_axes[0]]:
20
+ raise NotImplementedError("Healpix grid with first axis in increasing order is not supported")
16
21
 
17
22
  def first_axis_vals(self):
18
23
  rad2deg = 180 / math.pi
@@ -133,3 +138,7 @@ class HealpixGridMapper(DatacubeMapper):
133
138
  second_idx = self.second_axis_vals(first_val).index(second_val)
134
139
  healpix_index = self.axes_idx_to_healpix_idx(first_idx, second_idx)
135
140
  return healpix_index
141
+
142
+
143
+ # md5 grid hash in form {resolution : hash}
144
+ md5_hash = {}
@@ -5,7 +5,7 @@ from ..datacube_mappers import DatacubeMapper
5
5
 
6
6
 
7
7
  class NestedHealpixGridMapper(DatacubeMapper):
8
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
8
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
9
9
  # TODO: if local area is not empty list, raise NotImplemented
10
10
  self._mapped_axes = mapped_axes
11
11
  self._base_axis = base_axis
@@ -17,6 +17,11 @@ class NestedHealpixGridMapper(DatacubeMapper):
17
17
  self.k = int(math.log2(self.Nside))
18
18
  self.Npix = 12 * self.Nside * self.Nside
19
19
  self.Ncap = (self.Nside * (self.Nside - 1)) << 1
20
+ self.md5_hash = md5_hash.get(resolution, None)
21
+ if self._axis_reversed[mapped_axes[1]]:
22
+ raise NotImplementedError("Healpix grid with second axis in decreasing order is not supported")
23
+ if not self._axis_reversed[mapped_axes[0]]:
24
+ raise NotImplementedError("Healpix grid with first axis in increasing order is not supported")
20
25
 
21
26
  def first_axis_vals(self):
22
27
  rad2deg = 180 / math.pi
@@ -211,3 +216,7 @@ class NestedHealpixGridMapper(DatacubeMapper):
211
216
 
212
217
  def int_sqrt(self, i):
213
218
  return int(math.sqrt(i + 0.5))
219
+
220
+
221
+ # md5 grid hash in form {resolution : hash}
222
+ md5_hash = {}
@@ -4,7 +4,7 @@ from ..datacube_mappers import DatacubeMapper
4
4
 
5
5
 
6
6
  class LocalRegularGridMapper(DatacubeMapper):
7
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
7
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
8
8
  # TODO: if local area is not empty list, raise NotImplemented
9
9
  self._mapped_axes = mapped_axes
10
10
  self._base_axis = base_axis
@@ -15,17 +15,32 @@ class LocalRegularGridMapper(DatacubeMapper):
15
15
  if not isinstance(resolution, list):
16
16
  self.first_resolution = resolution
17
17
  self.second_resolution = resolution
18
+ self.md5_hash = md5_hash.get(resolution, None)
18
19
  else:
19
20
  self.first_resolution = resolution[0]
20
21
  self.second_resolution = resolution[1]
22
+ self.md5_hash = md5_hash.get(tuple(resolution), None)
21
23
  self._first_deg_increment = (local_area[1] - local_area[0]) / self.first_resolution
22
24
  self._second_deg_increment = (local_area[3] - local_area[2]) / self.second_resolution
23
- self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
25
+ if axis_reversed is None:
26
+ self._axis_reversed = {mapped_axes[0]: False, mapped_axes[1]: False}
27
+ else:
28
+ assert set(axis_reversed.keys()) == set(mapped_axes)
29
+ self._axis_reversed = axis_reversed
24
30
  self._first_axis_vals = self.first_axis_vals()
25
31
  self.compressed_grid_axes = [self._mapped_axes[1]]
32
+ if self._axis_reversed[mapped_axes[1]]:
33
+ raise NotImplementedError("Local regular grid with second axis in decreasing order is not supported")
26
34
 
27
35
  def first_axis_vals(self):
28
- first_ax_vals = [self._first_axis_max - i * self._first_deg_increment for i in range(self.first_resolution + 1)]
36
+ if self._axis_reversed[self._mapped_axes[0]]:
37
+ first_ax_vals = [
38
+ self._first_axis_max - i * self._first_deg_increment for i in range(self.first_resolution + 1)
39
+ ]
40
+ else:
41
+ first_ax_vals = [
42
+ self._first_axis_min + i * self._first_deg_increment for i in range(self.first_resolution + 1)
43
+ ]
29
44
  return first_ax_vals
30
45
 
31
46
  def map_first_axis(self, lower, upper):
@@ -68,3 +83,7 @@ class LocalRegularGridMapper(DatacubeMapper):
68
83
  second_idx = self.second_axis_vals(first_val).index(second_val)
69
84
  final_index = self.axes_idx_to_regular_idx(first_idx, second_idx)
70
85
  return final_index
86
+
87
+
88
+ # md5 grid hash in form {resolution : hash}
89
+ md5_hash = {}
@@ -5,7 +5,7 @@ from ..datacube_mappers import DatacubeMapper
5
5
 
6
6
 
7
7
  class OctahedralGridMapper(DatacubeMapper):
8
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
8
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
9
9
  # TODO: if local area is not empty list, raise NotImplemented
10
10
  self._mapped_axes = mapped_axes
11
11
  self._base_axis = base_axis
@@ -14,7 +14,12 @@ class OctahedralGridMapper(DatacubeMapper):
14
14
  self._first_idx_map = self.create_first_idx_map()
15
15
  self._second_axis_spacing = {}
16
16
  self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
17
+ if self._axis_reversed[mapped_axes[1]]:
18
+ raise NotImplementedError("Octahedral grid with second axis in decreasing order is not supported")
19
+ if not self._axis_reversed[mapped_axes[0]]:
20
+ raise NotImplementedError("Octahedral grid with first axis in increasing order is not supported")
17
21
  self.compressed_grid_axes = [self._mapped_axes[1]]
22
+ self.md5_hash = md5_hash.get(resolution, None)
18
23
 
19
24
  def gauss_first_guess(self):
20
25
  i = 0
@@ -2750,3 +2755,9 @@ class OctahedralGridMapper(DatacubeMapper):
2750
2755
  (first_idx, second_idx) = self.find_second_axis_idx(first_val, second_val)
2751
2756
  octahedral_index = self.axes_idx_to_octahedral_idx(first_idx, second_idx)
2752
2757
  return octahedral_index
2758
+
2759
+
2760
+ # md5 grid hash in form {resolution : hash}
2761
+ md5_hash = {
2762
+ 1280: "158db321ae8e773681eeb40e0a3d350f",
2763
+ }
@@ -4,7 +4,7 @@ from ..datacube_mappers import DatacubeMapper
4
4
 
5
5
 
6
6
  class ReducedLatLonMapper(DatacubeMapper):
7
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
7
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
8
8
  # TODO: if local area is not empty list, raise NotImplemented
9
9
  self._mapped_axes = mapped_axes
10
10
  self._base_axis = base_axis
@@ -12,6 +12,11 @@ class ReducedLatLonMapper(DatacubeMapper):
12
12
  self._axis_reversed = {mapped_axes[0]: False, mapped_axes[1]: False}
13
13
  self._first_axis_vals = self.first_axis_vals()
14
14
  self.compressed_grid_axes = [self._mapped_axes[1]]
15
+ self.md5_hash = md5_hash.get(resolution, None)
16
+ if self._axis_reversed[mapped_axes[1]]:
17
+ raise NotImplementedError("Reduced lat-lon grid with second axis in decreasing order is not supported")
18
+ if self._axis_reversed[mapped_axes[0]]:
19
+ raise NotImplementedError("Reduced lat-lon grid with first axis in decreasing order is not supported")
15
20
 
16
21
  def first_axis_vals(self):
17
22
  resolution = 180 / (self._resolution - 1)
@@ -1504,3 +1509,7 @@ class ReducedLatLonMapper(DatacubeMapper):
1504
1509
  second_idx = self.second_axis_vals(first_val).index(second_val)
1505
1510
  reduced_ll_index = self.axes_idx_to_reduced_ll_idx(first_idx, second_idx)
1506
1511
  return reduced_ll_index
1512
+
1513
+
1514
+ # md5 grid hash in form {resolution : hash}
1515
+ md5_hash = {}
@@ -4,18 +4,28 @@ from ..datacube_mappers import DatacubeMapper
4
4
 
5
5
 
6
6
  class RegularGridMapper(DatacubeMapper):
7
- def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
7
+ def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
8
8
  # TODO: if local area is not empty list, raise NotImplemented
9
9
  self._mapped_axes = mapped_axes
10
10
  self._base_axis = base_axis
11
11
  self._resolution = resolution
12
12
  self.deg_increment = 90 / self._resolution
13
- self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
13
+ if axis_reversed is None:
14
+ self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
15
+ else:
16
+ assert set(axis_reversed.keys()) == set(mapped_axes)
17
+ self._axis_reversed = axis_reversed
14
18
  self._first_axis_vals = self.first_axis_vals()
15
19
  self.compressed_grid_axes = [self._mapped_axes[1]]
20
+ self.md5_hash = md5_hash.get(resolution, None)
21
+ if self._axis_reversed[mapped_axes[1]]:
22
+ raise NotImplementedError("Regular grid with second axis in decreasing order is not supported")
16
23
 
17
24
  def first_axis_vals(self):
18
- first_ax_vals = [90 - i * self.deg_increment for i in range(2 * self._resolution)]
25
+ if self._axis_reversed[self._mapped_axes[0]]:
26
+ first_ax_vals = [90 - i * self.deg_increment for i in range(2 * self._resolution)]
27
+ else:
28
+ first_ax_vals = [-90 + i * self.deg_increment for i in range(2 * self._resolution)]
19
29
  return first_ax_vals
20
30
 
21
31
  def map_first_axis(self, lower, upper):
@@ -56,3 +66,7 @@ class RegularGridMapper(DatacubeMapper):
56
66
  second_idx = self.second_axis_vals(first_val).index(second_val)
57
67
  final_index = self.axes_idx_to_regular_idx(first_idx, second_idx)
58
68
  return final_index
69
+
70
+
71
+ # md5 grid hash in form {resolution : hash}
72
+ md5_hash = {}
@@ -93,6 +93,5 @@ class DatacubeAxisMerger(DatacubeAxisTransformation):
93
93
  if node.axis.name == self._first_axis:
94
94
  (new_first_vals, new_second_vals) = self.unmerge(node.values)
95
95
  node.values = new_first_vals
96
- # TODO: actually need to give the second axis of the transformation to get the interm axis
97
96
  interm_node = node.add_node_layer_after(self._second_axis, new_second_vals)
98
97
  return (interm_node, unwanted_path)
@@ -12,9 +12,8 @@ class DatacubeAxisTransformation(ABC):
12
12
  transformation_type = _type_to_datacube_transformation_lookup[transformation_type_key]
13
13
  transformation_file_name = _type_to_transformation_file_lookup[transformation_type_key]
14
14
  file_name = ".datacube_" + transformation_file_name
15
- module = import_module("polytope.datacube.transformations" + file_name + file_name)
15
+ module = import_module("polytope_feature.datacube.transformations" + file_name + file_name)
16
16
  constructor = getattr(module, transformation_type)
17
- # transformation_type_option = transformation_options[transformation_type_key]
18
17
  transformation_type_option = transformation_options
19
18
  new_transformation = deepcopy(constructor(name, transformation_type_option))
20
19
 
@@ -15,7 +15,7 @@ class DatacubeAxisTypeChange(DatacubeAxisTransformation):
15
15
 
16
16
  def generate_final_transformation(self):
17
17
  map_type = _type_to_datacube_type_change_lookup[self.new_type]
18
- module = import_module("polytope.datacube.transformations.datacube_type_change.datacube_type_change")
18
+ module = import_module("polytope_feature.datacube.transformations.datacube_type_change.datacube_type_change")
19
19
  constructor = getattr(module, map_type)
20
20
  transformation = deepcopy(constructor(self.name, self.new_type))
21
21
  return transformation
@@ -110,7 +110,7 @@ class HullSlicer(Engine):
110
110
 
111
111
  def _build_sliceable_child(self, polytope, ax, node, datacube, values, next_nodes, slice_axis_idx):
112
112
  for i, value in enumerate(values):
113
- if i == 0:
113
+ if i == 0 or ax.name not in self.compressed_axes:
114
114
  fvalue = ax.to_float(value)
115
115
  new_polytope = slice(polytope, ax.name, fvalue, slice_axis_idx)
116
116
  remapped_val = self.remap_values(ax, value)
@@ -121,19 +121,8 @@ class HullSlicer(Engine):
121
121
  child["unsliced_polytopes"].add(new_polytope)
122
122
  next_nodes.append(child)
123
123
  else:
124
- if ax.name not in self.compressed_axes:
125
- fvalue = ax.to_float(value)
126
- new_polytope = slice(polytope, ax.name, fvalue, slice_axis_idx)
127
- remapped_val = self.remap_values(ax, value)
128
- (child, next_nodes) = node.create_child(ax, remapped_val, next_nodes)
129
- child["unsliced_polytopes"] = copy(node["unsliced_polytopes"])
130
- child["unsliced_polytopes"].remove(polytope)
131
- if new_polytope is not None:
132
- child["unsliced_polytopes"].add(new_polytope)
133
- next_nodes.append(child)
134
- else:
135
- remapped_val = self.remap_values(ax, value)
136
- child.add_value(remapped_val)
124
+ remapped_val = self.remap_values(ax, value)
125
+ child.add_value(remapped_val)
137
126
 
138
127
  def _build_branch(self, ax, node, datacube, next_nodes):
139
128
  if ax.name not in self.compressed_axes:
@@ -142,26 +131,23 @@ class HullSlicer(Engine):
142
131
  for polytope in node["unsliced_polytopes"]:
143
132
  if ax.name in polytope._axes:
144
133
  right_unsliced_polytopes.append(polytope)
145
- # for polytope in node["unsliced_polytopes"]:
146
134
  for i, polytope in enumerate(right_unsliced_polytopes):
147
135
  node._parent = parent_node
148
- # if ax.name in polytope._axes:
149
- if True:
150
- lower, upper, slice_axis_idx = polytope.extents(ax.name)
151
- # here, first check if the axis is an unsliceable axis and directly build node if it is
152
- # NOTE: we should have already created the ax_is_unsliceable cache before
153
- if self.ax_is_unsliceable[ax.name]:
154
- self._build_unsliceable_child(polytope, ax, node, datacube, [lower], next_nodes, slice_axis_idx)
155
- else:
156
- values = self.find_values_between(polytope, ax, node, datacube, lower, upper)
157
- # NOTE: need to only remove the branches if the values are empty,
158
- # but only if there are no other possible children left in the tree that
159
- # we can append and if somehow this happens before and we need to remove, then what do we do??
160
- if i == len(right_unsliced_polytopes) - 1:
161
- # we have iterated all polytopes and we can now remove the node if we need to
162
- if len(values) == 0 and len(node.children) == 0:
163
- node.remove_branch()
164
- self._build_sliceable_child(polytope, ax, node, datacube, values, next_nodes, slice_axis_idx)
136
+ lower, upper, slice_axis_idx = polytope.extents(ax.name)
137
+ # here, first check if the axis is an unsliceable axis and directly build node if it is
138
+ # NOTE: we should have already created the ax_is_unsliceable cache before
139
+ if self.ax_is_unsliceable[ax.name]:
140
+ self._build_unsliceable_child(polytope, ax, node, datacube, [lower], next_nodes, slice_axis_idx)
141
+ else:
142
+ values = self.find_values_between(polytope, ax, node, datacube, lower, upper)
143
+ # NOTE: need to only remove the branches if the values are empty,
144
+ # but only if there are no other possible children left in the tree that
145
+ # we can append and if somehow this happens before and we need to remove, then what do we do??
146
+ if i == len(right_unsliced_polytopes) - 1:
147
+ # we have iterated all polytopes and we can now remove the node if we need to
148
+ if len(values) == 0 and len(node.children) == 0:
149
+ node.remove_branch()
150
+ self._build_sliceable_child(polytope, ax, node, datacube, values, next_nodes, slice_axis_idx)
165
151
  else:
166
152
  all_values = []
167
153
  all_lowers = []
@@ -215,7 +201,6 @@ class HullSlicer(Engine):
215
201
  for p in polytopes:
216
202
  if p.is_in_union:
217
203
  for axis in p.axes():
218
- # if axis in self.compressed_axes:
219
204
  if axis == self.compressed_axes[-1]:
220
205
  self.compressed_axes.remove(axis)
221
206
 
@@ -22,6 +22,7 @@ class MapperConfig(TransformationConfig):
22
22
  resolution: Union[int, List[int]] = 0
23
23
  axes: List[str] = [""]
24
24
  local: Optional[List[float]] = None
25
+ axis_reversed: Optional[Dict[str, bool]] = None
25
26
 
26
27
 
27
28
  class ReverseConfig(TransformationConfig):
@@ -66,7 +67,6 @@ class Config(ConfigModel):
66
67
  class PolytopeOptions(ABC):
67
68
  @staticmethod
68
69
  def get_polytope_options(options):
69
-
70
70
  parser = argparse.ArgumentParser(allow_abbrev=False)
71
71
  conflator = Conflator(app_name="polytope", model=Config, cli=False, argparser=parser, **options)
72
72
  config_options = conflator.load()
@@ -1,3 +1,4 @@
1
+ import logging
1
2
  from typing import List
2
3
 
3
4
  from .options import PolytopeOptions
@@ -55,9 +56,14 @@ class Polytope:
55
56
  """Low-level API which takes a polytope geometry object and uses it to slice the datacube"""
56
57
  return self.engine.extract(self.datacube, polytopes)
57
58
 
58
- def retrieve(self, request: Request, method="standard"):
59
+ def retrieve(self, request: Request, method="standard", context=None):
59
60
  """Higher-level API which takes a request and uses it to slice the datacube"""
61
+ if context is None:
62
+ context = {}
63
+ logging.info("Starting request for %s ", context)
60
64
  self.datacube.check_branching_axes(request)
61
65
  request_tree = self.engine.extract(self.datacube, request.polytopes())
62
- self.datacube.get(request_tree)
66
+ logging.info("Created request tree for %s ", context)
67
+ self.datacube.get(request_tree, context)
68
+ logging.info("Retrieved data for %s ", context)
63
69
  return request_tree
@@ -1,4 +1,8 @@
1
- class AxisOverdefinedError(KeyError):
1
+ class PolytopeError(Exception):
2
+ pass
3
+
4
+
5
+ class AxisOverdefinedError(PolytopeError, KeyError):
2
6
  def __init__(self, axis):
3
7
  self.axis = axis
4
8
  self.message = (
@@ -7,19 +11,19 @@ class AxisOverdefinedError(KeyError):
7
11
  )
8
12
 
9
13
 
10
- class AxisUnderdefinedError(KeyError):
14
+ class AxisUnderdefinedError(PolytopeError, KeyError):
11
15
  def __init__(self, axis):
12
16
  self.axis = axis
13
17
  self.message = f"Axis {axis} is underdefined. It does not appear in any input polytope."
14
18
 
15
19
 
16
- class AxisNotFoundError(KeyError):
20
+ class AxisNotFoundError(PolytopeError, KeyError):
17
21
  def __init__(self, axis):
18
22
  self.axis = axis
19
23
  self.message = f"Axis {axis} does not exist in the datacube."
20
24
 
21
25
 
22
- class UnsliceableShapeError(KeyError):
26
+ class UnsliceableShapeError(PolytopeError, KeyError):
23
27
  def __init__(self, axis):
24
28
  self.axis = axis
25
29
  self.message = f"Higher-dimensional shape does not support unsliceable axis {axis.name}."
@@ -0,0 +1 @@
1
+ __version__ = "1.0.8"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 1.0
2
2
  Name: polytope-python
3
- Version: 1.0.6
3
+ Version: 1.0.8
4
4
  Summary: Polytope datacube feature extraction library
5
5
  Home-page: https://github.com/ecmwf/polytope
6
6
  Author: ECMWF
@@ -0,0 +1,54 @@
1
+ LICENSE
2
+ MANIFEST.in
3
+ pyproject.toml
4
+ requirements.txt
5
+ setup.py
6
+ polytope_feature/__init__.py
7
+ polytope_feature/options.py
8
+ polytope_feature/polytope.py
9
+ polytope_feature/shapes.py
10
+ polytope_feature/version.py
11
+ polytope_feature/datacube/__init__.py
12
+ polytope_feature/datacube/datacube_axis.py
13
+ polytope_feature/datacube/index_tree_pb2.py
14
+ polytope_feature/datacube/tensor_index_tree.py
15
+ polytope_feature/datacube/tree_encoding.py
16
+ polytope_feature/datacube/backends/__init__.py
17
+ polytope_feature/datacube/backends/datacube.py
18
+ polytope_feature/datacube/backends/fdb.py
19
+ polytope_feature/datacube/backends/mock.py
20
+ polytope_feature/datacube/backends/xarray.py
21
+ polytope_feature/datacube/transformations/__init__.py
22
+ polytope_feature/datacube/transformations/datacube_transformations.py
23
+ polytope_feature/datacube/transformations/datacube_cyclic/__init__.py
24
+ polytope_feature/datacube/transformations/datacube_cyclic/datacube_cyclic.py
25
+ polytope_feature/datacube/transformations/datacube_mappers/__init__.py
26
+ polytope_feature/datacube/transformations/datacube_mappers/datacube_mappers.py
27
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/__init__.py
28
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix.py
29
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py
30
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/local_regular.py
31
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/octahedral.py
32
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py
33
+ polytope_feature/datacube/transformations/datacube_mappers/mapper_types/regular.py
34
+ polytope_feature/datacube/transformations/datacube_merger/__init__.py
35
+ polytope_feature/datacube/transformations/datacube_merger/datacube_merger.py
36
+ polytope_feature/datacube/transformations/datacube_reverse/__init__.py
37
+ polytope_feature/datacube/transformations/datacube_reverse/datacube_reverse.py
38
+ polytope_feature/datacube/transformations/datacube_type_change/__init__.py
39
+ polytope_feature/datacube/transformations/datacube_type_change/datacube_type_change.py
40
+ polytope_feature/engine/__init__.py
41
+ polytope_feature/engine/engine.py
42
+ polytope_feature/engine/hullslicer.py
43
+ polytope_feature/utility/__init__.py
44
+ polytope_feature/utility/combinatorics.py
45
+ polytope_feature/utility/exceptions.py
46
+ polytope_feature/utility/geometry.py
47
+ polytope_feature/utility/list_tools.py
48
+ polytope_feature/utility/profiling.py
49
+ polytope_python.egg-info/PKG-INFO
50
+ polytope_python.egg-info/SOURCES.txt
51
+ polytope_python.egg-info/dependency_links.txt
52
+ polytope_python.egg-info/not-zip-safe
53
+ polytope_python.egg-info/requires.txt
54
+ polytope_python.egg-info/top_level.txt
@@ -0,0 +1 @@
1
+ polytope_feature
@@ -5,7 +5,7 @@ from setuptools import find_packages, setup
5
5
 
6
6
  __version__ = re.search(
7
7
  r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
8
- io.open("polytope/version.py", encoding="utf_8_sig").read(),
8
+ io.open("polytope_feature/version.py", encoding="utf_8_sig").read(),
9
9
  ).group(1)
10
10
 
11
11
  with open("requirements.txt") as f:
@@ -1 +0,0 @@
1
- __version__ = "1.0.6"
@@ -1,54 +0,0 @@
1
- LICENSE
2
- MANIFEST.in
3
- pyproject.toml
4
- requirements.txt
5
- setup.py
6
- polytope/__init__.py
7
- polytope/options.py
8
- polytope/polytope.py
9
- polytope/shapes.py
10
- polytope/version.py
11
- polytope/datacube/__init__.py
12
- polytope/datacube/datacube_axis.py
13
- polytope/datacube/index_tree_pb2.py
14
- polytope/datacube/tensor_index_tree.py
15
- polytope/datacube/tree_encoding.py
16
- polytope/datacube/backends/__init__.py
17
- polytope/datacube/backends/datacube.py
18
- polytope/datacube/backends/fdb.py
19
- polytope/datacube/backends/mock.py
20
- polytope/datacube/backends/xarray.py
21
- polytope/datacube/transformations/__init__.py
22
- polytope/datacube/transformations/datacube_transformations.py
23
- polytope/datacube/transformations/datacube_cyclic/__init__.py
24
- polytope/datacube/transformations/datacube_cyclic/datacube_cyclic.py
25
- polytope/datacube/transformations/datacube_mappers/__init__.py
26
- polytope/datacube/transformations/datacube_mappers/datacube_mappers.py
27
- polytope/datacube/transformations/datacube_mappers/mapper_types/__init__.py
28
- polytope/datacube/transformations/datacube_mappers/mapper_types/healpix.py
29
- polytope/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py
30
- polytope/datacube/transformations/datacube_mappers/mapper_types/local_regular.py
31
- polytope/datacube/transformations/datacube_mappers/mapper_types/octahedral.py
32
- polytope/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py
33
- polytope/datacube/transformations/datacube_mappers/mapper_types/regular.py
34
- polytope/datacube/transformations/datacube_merger/__init__.py
35
- polytope/datacube/transformations/datacube_merger/datacube_merger.py
36
- polytope/datacube/transformations/datacube_reverse/__init__.py
37
- polytope/datacube/transformations/datacube_reverse/datacube_reverse.py
38
- polytope/datacube/transformations/datacube_type_change/__init__.py
39
- polytope/datacube/transformations/datacube_type_change/datacube_type_change.py
40
- polytope/engine/__init__.py
41
- polytope/engine/engine.py
42
- polytope/engine/hullslicer.py
43
- polytope/utility/__init__.py
44
- polytope/utility/combinatorics.py
45
- polytope/utility/exceptions.py
46
- polytope/utility/geometry.py
47
- polytope/utility/list_tools.py
48
- polytope/utility/profiling.py
49
- polytope_python.egg-info/PKG-INFO
50
- polytope_python.egg-info/SOURCES.txt
51
- polytope_python.egg-info/dependency_links.txt
52
- polytope_python.egg-info/not-zip-safe
53
- polytope_python.egg-info/requires.txt
54
- polytope_python.egg-info/top_level.txt
File without changes