polytope-python 1.0.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. polytope_feature/__init__.py +1 -0
  2. polytope_feature/datacube/__init__.py +1 -0
  3. polytope_feature/datacube/backends/__init__.py +1 -0
  4. polytope_feature/datacube/backends/datacube.py +171 -0
  5. polytope_feature/datacube/backends/fdb.py +399 -0
  6. polytope_feature/datacube/backends/mock.py +71 -0
  7. polytope_feature/datacube/backends/xarray.py +142 -0
  8. polytope_feature/datacube/datacube_axis.py +332 -0
  9. polytope_feature/datacube/index_tree_pb2.py +27 -0
  10. polytope_feature/datacube/tensor_index_tree.py +228 -0
  11. polytope_feature/datacube/transformations/__init__.py +1 -0
  12. polytope_feature/datacube/transformations/datacube_cyclic/__init__.py +1 -0
  13. polytope_feature/datacube/transformations/datacube_cyclic/datacube_cyclic.py +171 -0
  14. polytope_feature/datacube/transformations/datacube_mappers/__init__.py +1 -0
  15. polytope_feature/datacube/transformations/datacube_mappers/datacube_mappers.py +141 -0
  16. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/__init__.py +5 -0
  17. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix.py +147 -0
  18. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py +229 -0
  19. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/local_regular.py +95 -0
  20. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/octahedral.py +7896 -0
  21. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_gaussian.py +1459 -0
  22. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py +5128 -0
  23. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/regular.py +75 -0
  24. polytope_feature/datacube/transformations/datacube_merger/__init__.py +1 -0
  25. polytope_feature/datacube/transformations/datacube_merger/datacube_merger.py +95 -0
  26. polytope_feature/datacube/transformations/datacube_reverse/__init__.py +1 -0
  27. polytope_feature/datacube/transformations/datacube_reverse/datacube_reverse.py +65 -0
  28. polytope_feature/datacube/transformations/datacube_transformations.py +96 -0
  29. polytope_feature/datacube/transformations/datacube_type_change/__init__.py +1 -0
  30. polytope_feature/datacube/transformations/datacube_type_change/datacube_type_change.py +124 -0
  31. polytope_feature/datacube/tree_encoding.py +132 -0
  32. polytope_feature/engine/__init__.py +1 -0
  33. polytope_feature/engine/engine.py +19 -0
  34. polytope_feature/engine/hullslicer.py +316 -0
  35. polytope_feature/options.py +77 -0
  36. polytope_feature/polytope.py +71 -0
  37. polytope_feature/shapes.py +405 -0
  38. polytope_feature/utility/__init__.py +0 -0
  39. polytope_feature/utility/combinatorics.py +48 -0
  40. polytope_feature/utility/exceptions.py +45 -0
  41. polytope_feature/utility/geometry.py +26 -0
  42. polytope_feature/utility/list_tools.py +41 -0
  43. polytope_feature/utility/profiling.py +14 -0
  44. polytope_feature/version.py +1 -0
  45. polytope_python-1.0.31.dist-info/LICENSE +201 -0
  46. polytope_python-1.0.31.dist-info/METADATA +21 -0
  47. polytope_python-1.0.31.dist-info/RECORD +49 -0
  48. polytope_python-1.0.31.dist-info/WHEEL +5 -0
  49. polytope_python-1.0.31.dist-info/top_level.txt +1 -0
@@ -0,0 +1,75 @@
1
+ import bisect
2
+
3
+ from ..datacube_mappers import DatacubeMapper
4
+
5
+
6
class RegularGridMapper(DatacubeMapper):
    """Maps a flat index on a regular (equiangular) lat/lon grid to/from its two axes.

    A regular grid of resolution R has 2R first-axis (latitude) lines spaced
    90/R degrees apart, and 4R second-axis (longitude) points per line.
    """

    def __init__(self, base_axis, mapped_axes, resolution, md5_hash=None, local_area=None, axis_reversed=None):
        # NOTE: local_area defaults to None (was a mutable default `[]`,
        # which is an anti-pattern); it is currently unused.
        # TODO: if local_area is provided/non-empty, raise NotImplementedError.
        self._mapped_axes = mapped_axes
        self._base_axis = base_axis
        self._resolution = resolution
        self.deg_increment = 90 / self._resolution
        if axis_reversed is None:
            # Default orientation: first axis decreasing (90 -> ...), second increasing.
            self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
        else:
            assert set(axis_reversed.keys()) == set(mapped_axes)
            self._axis_reversed = axis_reversed
        self._first_axis_vals = self.first_axis_vals()
        self.compressed_grid_axes = [self._mapped_axes[1]]
        if md5_hash is not None:
            self.md5_hash = md5_hash
        else:
            # Fall back to the known grid hash for this resolution, if any.
            self.md5_hash = _md5_hash.get(resolution, None)
        if self._axis_reversed[mapped_axes[1]]:
            raise NotImplementedError("Regular grid with second axis in decreasing order is not supported")

    def first_axis_vals(self):
        """Return all first-axis (latitude) line values, in grid order."""
        if self._axis_reversed[self._mapped_axes[0]]:
            return [90 - i * self.deg_increment for i in range(2 * self._resolution)]
        return [-90 + i * self.deg_increment for i in range(2 * self._resolution)]

    def map_first_axis(self, lower, upper):
        """Return the first-axis values lying within [lower, upper]."""
        return [val for val in self._first_axis_vals if lower <= val <= upper]

    def second_axis_vals(self, first_val):
        """Return all second-axis (longitude) values; identical for every first_val."""
        return [i * self.deg_increment for i in range(4 * self._resolution)]

    def map_second_axis(self, first_val, lower, upper):
        """Return the second-axis values lying within [lower, upper]."""
        return [val for val in self.second_axis_vals(first_val) if lower <= val <= upper]

    def axes_idx_to_regular_idx(self, first_idx, second_idx):
        """Flatten (first_idx, second_idx) into the 1D grid index."""
        return first_idx * 4 * self._resolution + second_idx

    def find_second_idx(self, first_val, second_val):
        """Return the insertion index of second_val on the second axis (tolerant)."""
        tol = 1e-10
        return bisect.bisect_left(self.second_axis_vals(first_val), second_val - tol)

    def unmap_first_val_to_start_line_idx(self, first_val):
        """Return the flat index of the first point on first_val's grid line."""
        tol = 1e-8
        first_val = [i for i in self._first_axis_vals if first_val - tol <= i <= first_val + tol][0]
        first_idx = self._first_axis_vals.index(first_val)
        return first_idx * 4 * self._resolution

    def unmap(self, first_val, second_val):
        """Map one (first_val, second_val) pair (each given as a 1-element sequence)
        to its flat grid index, matching stored values within a tolerance."""
        tol = 1e-8
        first_val = [i for i in self._first_axis_vals if first_val[0] - tol <= i <= first_val[0] + tol][0]
        first_idx = self._first_axis_vals.index(first_val)
        # Compute the second-axis values once instead of rebuilding the list twice.
        second_vals = self.second_axis_vals(first_val)
        second_val = [i for i in second_vals if second_val[0] - tol <= i <= second_val[0] + tol][0]
        second_idx = second_vals.index(second_val)
        return self.axes_idx_to_regular_idx(first_idx, second_idx)
72
+
73
+
74
# md5 grid hash in form {resolution : hash}
# Empty by default; RegularGridMapper.__init__ falls back to this table when no
# explicit md5_hash is supplied (and stores None when the resolution is absent).
_md5_hash = {}
@@ -0,0 +1 @@
1
+ from .datacube_merger import *
@@ -0,0 +1,95 @@
1
+ import logging
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+
6
+ from ..datacube_transformations import DatacubeAxisTransformation
7
+
8
+
9
class DatacubeAxisMerger(DatacubeAxisTransformation):
    """Merges two datacube axes (e.g. date + time) into a single combined axis.

    Values on the merged axis are built as "<first><linker0><second><linker1>"
    and parsed as datetimes; unmerge() splits them back into the two axes.
    """

    def __init__(self, name, merge_options, datacube=None):
        self.transformation_options = merge_options
        self.name = name
        self._first_axis = name
        self._second_axis = merge_options.other_axis
        self._linkers = merge_options.linkers
        self._merged_values = self.merged_values(datacube)

    def blocked_axes(self):
        # The second axis disappears from the datacube: it is folded into the first.
        return [self._second_axis]

    def unwanted_axes(self):
        return []

    def _mapped_axes(self):
        return self._first_axis

    def merged_values(self, datacube):
        """Build the sorted datetime64[s] values of the merged axis from the
        cartesian product of the two source axes' values."""
        first_ax_vals = np.array(datacube.ax_vals(self.name))
        second_ax_name = self._second_axis
        second_ax_vals = np.array(datacube.ax_vals(second_ax_name))
        linkers = self._linkers
        first_grid, second_grid = np.meshgrid(first_ax_vals, second_ax_vals, indexing="ij")
        # Vectorized string concat: first + linker0 + second + linker1.
        combined_strings = np.char.add(
            np.char.add(first_grid.ravel(), linkers[0]), np.char.add(second_grid.ravel(), linkers[1])
        )
        merged_values = pd.to_datetime(combined_strings).to_numpy().astype("datetime64[s]")
        merged_values = np.array(merged_values)
        merged_values.sort()
        logging.info(
            f"Merged values {first_ax_vals} on axis {self.name} and \
            values {second_ax_vals} on axis {second_ax_name} to values {merged_values}"
        )
        return merged_values

    def transformation_axes_final(self):
        return [self._first_axis]

    def generate_final_transformation(self):
        return self

    def unmerge(self, merged_val):
        """Split each merged value back into its (first, second) axis values.

        Returns a pair of tuples (first_values, second_values).
        """
        first_values = []
        second_values = []
        # Linker lengths are loop-invariant; hoist them.
        first_linker_size = len(self._linkers[0])
        second_linker_size = len(self._linkers[1])
        for merged_value in merged_val:
            # Use a distinct name instead of rebinding the `merged_val` parameter.
            merged_str = str(merged_value)
            first_idx = merged_str.find(self._linkers[0])
            first_val = merged_str[:first_idx]
            # Compute the end explicitly: the previous `[:-second_linker_size]`
            # form returned "" whenever the second linker was the empty string.
            end = len(merged_str) - second_linker_size
            second_val = merged_str[first_idx + first_linker_size : end]

            # TODO: maybe replacing like this is too specific to time/dates?
            first_val = str(first_val).replace("-", "")
            second_val = second_val.replace(":", "")
            logging.info(
                f"Unmerged value {merged_str} to values {first_val} on axis {self.name} \
                and {second_val} on axis {self._second_axis}"
            )
            first_values.append(first_val)
            second_values.append(second_val)
        return (tuple(first_values), tuple(second_values))

    def change_val_type(self, axis_name, values):
        """Parse values into pandas datetimes."""
        return pd.to_datetime(values)

    def find_modified_indexes(self, indexes, path, datacube, axis):
        # Returns None implicitly for other axes, matching this file's convention.
        if axis.name == self._first_axis:
            return self._merged_values

    def unmap_path_key(self, key_value_path, leaf_path, unwanted_path, axis):
        """Replace the merged-axis key with its two unmerged axis keys."""
        new_key_value_path = {}
        value = key_value_path[axis.name]
        if axis.name == self._first_axis:
            (first_val, second_val) = self.unmerge(value)
            new_key_value_path[self._first_axis] = first_val
            new_key_value_path[self._second_axis] = second_val
        # NOTE(review): when axis is not the merged axis an empty dict is
        # returned (the incoming key is dropped) — confirm callers expect this.
        return (new_key_value_path, leaf_path, unwanted_path)

    def unmap_tree_node(self, node, unwanted_path):
        """Split a merged tree node: keep first-axis values on the node and
        insert a new layer for the second axis."""
        if node.axis.name == self._first_axis:
            (new_first_vals, new_second_vals) = self.unmerge(node.values)
            node.values = new_first_vals
            interm_node = node.add_node_layer_after(self._second_axis, new_second_vals)
            return (interm_node, unwanted_path)
@@ -0,0 +1 @@
1
+ from .datacube_reverse import *
@@ -0,0 +1,65 @@
1
+ from ....utility.list_tools import bisect_left_cmp, bisect_right_cmp
2
+ from ..datacube_transformations import DatacubeAxisTransformation
3
+
4
+
5
class DatacubeAxisReverse(DatacubeAxisTransformation):
    """Transformation marking an axis whose stored values are in decreasing order."""

    def __init__(self, name, mapper_options, datacube=None):
        self.name = name
        self.transformation_options = mapper_options

    def generate_final_transformation(self):
        return self

    def transformation_axes_final(self):
        return [self.name]

    def change_val_type(self, axis_name, values):
        return values

    def blocked_axes(self):
        return []

    def unwanted_axes(self):
        return []

    def find_modified_indexes(self, indexes, path, datacube, axis):
        """Return indexes sorted increasingly for complete (pandas) axes,
        unchanged otherwise."""
        if axis.name in datacube.complete_axes:
            return indexes.sort_values()
        return indexes

    def find_indices_between(self, indexes, low, up, datacube, method, indexes_between_ranges, axis):
        """Return the indexes within [low, up] for this axis.

        For "surrounding"/"nearest" methods the selection is widened by one
        index on each side. Note: the incoming `indexes_between_ranges` is
        intentionally ignored and a fresh list is returned (preserves the
        original behavior).
        """
        indexes_between_ranges = []
        if axis.name == self.name:
            # Proximity methods widen the slice by one index on each side;
            # the widening is the only difference between the two old branches.
            widen = method == "surrounding" or method == "nearest"
            if axis.name in datacube.complete_axes:
                # Find the range of indexes between lower and upper.
                # https://pandas.pydata.org/docs/reference/api/pandas.Index.searchsorted.html
                # Assumes the indexes are already sorted (could sort to be sure)
                # and monotonically increasing.
                start = indexes.searchsorted(low, "left")
                end = indexes.searchsorted(up, "right")
                if widen:
                    start = max(start - 1, 0)
                    end = min(end + 1, len(indexes))
                indexes_between_ranges.extend(indexes[start:end].to_list())
            else:
                # Plain sequence in decreasing order: use the reversed-order bisects.
                end_idx = bisect_left_cmp(indexes, low, cmp=lambda x, y: x > y) + 1
                start_idx = bisect_right_cmp(indexes, up, cmp=lambda x, y: x > y)
                if widen:
                    start_idx = max(start_idx - 1, 0)
                    end_idx = min(end_idx + 1, len(indexes))
                indexes_between_ranges.extend(indexes[start_idx:end_idx])
        return indexes_between_ranges
@@ -0,0 +1,96 @@
1
+ from abc import ABC, abstractmethod
2
+ from copy import deepcopy
3
+ from importlib import import_module
4
+
5
+
6
class DatacubeAxisTransformation(ABC):
    """Base class for datacube axis transformations (mapper, cyclic, merge, ...).

    Subclasses implement the abstract hooks; the remaining methods are no-op
    defaults that concrete transformations override selectively.
    """

    def __init__(self):
        self.parent = None

    @staticmethod
    def create_transform(name, transformation_type_key, transformation_options, datacube):
        """Instantiate the transformation class registered for the given type key."""
        class_name = _type_to_datacube_transformation_lookup[transformation_type_key]
        module_suffix = ".datacube_" + _type_to_transformation_file_lookup[transformation_type_key]
        # Module path is <package><suffix><suffix>, e.g.
        # ...transformations.datacube_merger.datacube_merger
        module = import_module(f"polytope_feature.datacube.transformations{module_suffix}{module_suffix}")
        constructor = getattr(module, class_name)
        new_transformation = deepcopy(constructor(name, transformation_options, datacube))
        new_transformation.name = name
        return new_transformation

    @staticmethod
    def get_final_axes(name, transformation_type_key, transformation_options, datacube):
        """Create the transformation and return (final axis names, transformation)."""
        transform = DatacubeAxisTransformation.create_transform(
            name, transformation_type_key, transformation_options, datacube
        )
        return (transform.transformation_axes_final(), transform)

    # Placeholders: concrete transformations shadow these with instance attributes.
    def name(self):
        pass

    def transformation_options(self):
        pass

    @abstractmethod
    def generate_final_transformation(self):
        pass

    @abstractmethod
    def transformation_axes_final(self):
        pass

    @abstractmethod
    def change_val_type(self, axis_name, values):
        pass

    # --- No-op defaults, overridden selectively by subclasses ---

    def find_modified_indexes(self, indexes, path, datacube, axis):
        """Default: leave the indexes untouched."""
        return indexes

    def unmap_path_key(self, key_value_path, leaf_path, unwanted_path, axis):
        """Default: pass the path triple through unchanged."""
        return (key_value_path, leaf_path, unwanted_path)

    def unmap_tree_node(self, node, unwanted_path):
        """Default: pass the node and path through unchanged."""
        return (node, unwanted_path)

    def find_indices_between(self, indexes_ranges, low, up, datacube, method, indexes_between_ranges, axis):
        """Default: return the accumulated ranges unchanged."""
        return indexes_between_ranges

    def _remap_val_to_axis_range(self, value, axis):
        """Default: identity remap."""
        return value

    def offset(self, range, axis, offset):
        """Default: identity offset."""
        return offset

    def remap(self, range, ranges, axis):
        """Default: identity remap of ranges."""
        return ranges

    def to_intervals(self, range, intervals, axis):
        """Default: identity interval expansion."""
        return intervals
72
+
73
+
74
# Registry: transformation type key -> class name implementing it.
_type_to_datacube_transformation_lookup = {
    "mapper": "DatacubeMapper",
    "cyclic": "DatacubeAxisCyclic",
    "merge": "DatacubeAxisMerger",
    "reverse": "DatacubeAxisReverse",
    "type_change": "DatacubeAxisTypeChange",
}

# Registry: transformation type key -> module file-name suffix
# (the module lives at .datacube_<suffix>.datacube_<suffix>).
_type_to_transformation_file_lookup = {
    "mapper": "mappers",
    "cyclic": "cyclic",
    "merge": "merger",
    "reverse": "reverse",
    "type_change": "type_change",
}

# Registry: transformation type key -> axis attribute flag marking its presence.
has_transform = {
    "mapper": "has_mapper",
    "cyclic": "is_cyclic",
    "merge": "has_merger",
    "reverse": "reorder",
    "type_change": "type_change",
}
@@ -0,0 +1 @@
1
+ from .datacube_type_change import *
@@ -0,0 +1,124 @@
1
+ from copy import deepcopy
2
+ from importlib import import_module
3
+
4
+ import pandas as pd
5
+
6
+ from ..datacube_transformations import DatacubeAxisTransformation
7
+
8
+
9
class DatacubeAxisTypeChange(DatacubeAxisTransformation):
    """Transformation converting a string-typed axis to a richer type (int, date, time).

    The concrete conversion is delegated to a subclass chosen from
    `_type_to_datacube_type_change_lookup` by the requested type name.
    """

    def __init__(self, name, type_options, datacube=None):
        self.name = name
        self.transformation_options = type_options
        self.new_type = type_options.type
        self._final_transformation = self.generate_final_transformation()

    def generate_final_transformation(self):
        """Instantiate the subclass implementing the requested type conversion."""
        map_type = _type_to_datacube_type_change_lookup[self.new_type]
        module = import_module("polytope_feature.datacube.transformations.datacube_type_change.datacube_type_change")
        constructor = getattr(module, map_type)
        return deepcopy(constructor(self.name, self.new_type))

    def transformation_axes_final(self):
        return [self._final_transformation.axis_name]

    def change_val_type(self, axis_name, values):
        """Convert all values to the new type, sorted; None if any value fails."""
        converted = [self._final_transformation.transform_type(val) for val in values]
        if None in converted:
            return None
        converted.sort()
        return converted

    def make_str(self, value):
        """Convert typed values back to their string form."""
        return self._final_transformation.make_str(value)

    def blocked_axes(self):
        return []

    def unwanted_axes(self):
        return []

    def find_modified_indexes(self, indexes, path, datacube, axis):
        # Returns None implicitly for other axes, matching this file's convention.
        if axis.name == self.name:
            return self.change_val_type(axis.name, indexes)

    def unmap_path_key(self, key_value_path, leaf_path, unwanted_path, axis):
        """Swap the typed value on this axis back to its string representation."""
        value = key_value_path[axis.name]
        if axis.name == self.name:
            key_value_path[axis.name] = self.make_str(value)
        return (key_value_path, leaf_path, unwanted_path)

    def unmap_tree_node(self, node, unwanted_path):
        """Convert a tree node's values on this axis back to strings, in place."""
        if node.axis.name == self.name:
            node.values = self.make_str(node.values)
        return (node, unwanted_path)
60
+
61
+
62
class TypeChangeStrToInt(DatacubeAxisTypeChange):
    """Converts string axis values to ints and back."""

    def __init__(self, axis_name, new_type):
        self.axis_name = axis_name
        self._new_type = new_type

    def transform_type(self, value):
        """Return int(value), or None when the string is not an integer."""
        try:
            return int(value)
        except ValueError:
            return None

    def make_str(self, value):
        """Render each int back to a string."""
        return tuple(str(item) for item in value)
78
+
79
+
80
class TypeChangeStrToTimestamp(DatacubeAxisTypeChange):
    """Converts date strings to pandas Timestamps and back (YYYYMMDD)."""

    def __init__(self, axis_name, new_type):
        self.axis_name = axis_name
        self._new_type = new_type

    def transform_type(self, value):
        """Return pd.Timestamp(value), or None when the string does not parse."""
        try:
            return pd.Timestamp(value)
        except ValueError:
            return None

    def make_str(self, value):
        """Render each timestamp back to a YYYYMMDD string."""
        return tuple(ts.strftime("%Y%m%d") for ts in value)
96
+
97
+
98
class TypeChangeStrToTimedelta(DatacubeAxisTypeChange):
    """Converts "HHMM" time strings to pandas Timedeltas and back."""

    def __init__(self, axis_name, new_type):
        self.axis_name = axis_name
        self._new_type = new_type

    def transform_type(self, value):
        """Parse an "HHMM" string into a Timedelta; None when it does not parse."""
        try:
            return pd.Timedelta(hours=int(value[:2]), minutes=int(value[2:]))
        except ValueError:
            return None

    def make_str(self, value):
        """Render each timedelta back to an "HHMM" string."""
        rendered = []
        for delta in value:
            secs = delta.total_seconds()
            hours = int(secs // 3600)
            mins = int((secs % 3600) // 60)
            rendered.append(f"{hours:02d}{mins:02d}")
        return tuple(rendered)
118
+
119
+
120
# Registry mapping the requested new type name to the converter class above;
# consumed by DatacubeAxisTypeChange.generate_final_transformation.
_type_to_datacube_type_change_lookup = {
    "int": "TypeChangeStrToInt",
    "date": "TypeChangeStrToTimestamp",
    "time": "TypeChangeStrToTimedelta",
}
@@ -0,0 +1,132 @@
1
+ import math
2
+ from copy import deepcopy
3
+
4
+ from . import index_tree_pb2 as pb2
5
+ from .datacube_axis import IntDatacubeAxis
6
+ from .tensor_index_tree import TensorIndexTree
7
+
8
+
9
def encode_tree(tree: TensorIndexTree):
    """Serialize a TensorIndexTree into protobuf bytes.

    Returns the serialized byte string of the root Node message.
    """
    node = pb2.Node()

    node.axis = tree.axis.name

    # NOTE: do we need this if we parse the tree before it has values?
    if tree.result is not None:
        # Bulk-copy into the repeated field instead of appending one by one.
        node.result.extend(tree.result)

    # Nest children in protobuf root tree node
    for child in tree.children:
        encode_child(tree, child, node)

    # Write to file
    return node.SerializeToString()
25
+
26
+
27
def write_encoded_tree_to_file(tree_bytes, filename="encodedTree"):
    """Write serialized tree bytes to a file.

    The destination is parameterized (defaulting to the previous hard-coded
    "encodedTree" in the current working directory, so existing callers are
    unaffected).
    """
    with open(filename, "wb") as fs:
        fs.write(tree_bytes)
30
+
31
+
32
def encode_child(tree: TensorIndexTree, child: TensorIndexTree, node, result_size=[]):
    """Recursively encode `child` (a child of `tree`) into protobuf node `node`.

    Returns False when `child` is hidden (the hidden node's indexes are folded
    into the parent instead of emitting a child node), and None otherwise.

    NOTE(review): `result_size=[]` is a mutable default argument; it appears
    safe only because it is deepcopied before mutation — confirm and consider
    `result_size=None`.
    """
    child_node = pb2.Node()

    # Accumulate the branch's value counts down the recursion (copy, don't share).
    new_result_size = deepcopy(result_size)
    # new_result_size = result_size
    new_result_size.append(len(child.values))

    if child.hidden:
        # add indexes to parent and add also indexes size...
        node.indexes.extend(tree.indexes)
        break_tag = False
        return break_tag

    # need to add axis and children etc to the encoded node only if the tree node isn't hidden
    else:
        child_node.axis = child.axis.name
        child_node.value.extend(child.values)
        child_node.size_result.extend(new_result_size)

    for c in child.children:
        breaking = encode_child(child, c, child_node, new_result_size)
        # NOTE(review): both return values of encode_child (False and None) are
        # falsy, so this loop always records the branch sizes and breaks after
        # the first child — presumably siblings below a hidden level share
        # identical structure; verify this is intended.
        if not breaking:
            for c_ in child.children:
                child_node.size_indexes_branch.append(len(c_.children))
            break

    # we append the children once their branch has been completed until the leaf
    if not child.hidden:
        node.children.append(child_node)
61
+
62
+
63
def decode_tree(datacube, bytearray):
    """Deserialize protobuf bytes into a fresh TensorIndexTree.

    Axis objects are resolved through `datacube._axes`, except the synthetic
    "root" axis which is rebuilt locally.
    """
    root_msg = pb2.Node()
    root_msg.ParseFromString(bytearray)

    tree = TensorIndexTree()

    if root_msg.axis == "root":
        root_axis = IntDatacubeAxis()
        root_axis.name = "root"
        tree.axis = root_axis
    else:
        tree.axis = datacube._axes[root_msg.axis]

    # Put contents of node children into tree
    decode_child(root_msg, tree, datacube)

    return tree
80
+
81
+
82
def decode_child(node, tree, datacube):
    """Recursively copy protobuf `node`'s children into TensorIndexTree `tree`.

    Leaf protobuf nodes carry the result/index payloads; interior nodes carry
    axis names and values.
    """
    if len(node.children) == 0:
        # Leaf: attach the decoded payloads to the current tree node.
        tree.result = node.result
        tree.result_size = node.size_result
        tree.indexes = node.indexes
        tree.indexes_size = node.size_indexes_branch
    for child in node.children:
        if child.axis in datacube._axes.keys():
            child_axis = datacube._axes[child.axis]
            child_vals = tuple(child.value)
            child_node = TensorIndexTree(child_axis, child_vals)
            tree.add_child(child_node)
            decode_child(child, child_node, datacube)
        else:
            # Axis name unknown to the datacube: skip this level and attach the
            # grandchildren directly.
            # NOTE(review): assumes such a node has at least one child and that
            # all grandchildren share the same axis — confirm against encoder.
            grandchild_axis = datacube._axes[child.children[0].axis]
            for c in child.children:
                grandchild_vals = tuple(c.value)
                grandchild_node = TensorIndexTree(grandchild_axis, grandchild_vals)
                tree.add_child(grandchild_node)
                decode_child(c, grandchild_node, datacube)
102
+
103
+
104
def decode_into_tree(tree, bytearray):
    """Decode serialized results directly into an existing tree instance.

    Returns the same tree, mutated in place.
    """
    # TODO: write a decoder that decodes the bytearray (ie results) from gribjump directly into the tree instance
    decoded = pb2.Node()
    decoded.ParseFromString(bytearray)

    decode_child_into_tree(tree, decoded)

    return tree
112
+
113
+
114
def decode_child_into_tree(tree, node):
    """Recursively copy decoded results from protobuf `node` into existing `tree`.

    Walks both structures in lockstep until a hidden tree node is reached; at
    that point the flat result array is sliced out across the hidden leaves.
    """
    if not tree.hidden:
        # iterate through tree
        for i, child in enumerate(tree.children):
            node_c = node.children[i]
            decode_child_into_tree(child, node_c)
    else:
        # TODO: if it's hidden, use the sizes to attribute to the hidden lat/lon nodes...
        # NOTE(review): each leaf receives `num_results` values, the product of
        # size_result — confirm this matches the encoder's result layout.
        num_results = math.prod(node.size_result)
        num_lat_branches = len(tree.children)
        start_result_idx = 0
        for i in range(num_lat_branches):
            lat_node = tree.children[i]
            num_lon_branches = len(lat_node.children)
            for j in range(num_lon_branches):
                lon_node = lat_node.children[j]
                next_result_idx = start_result_idx + num_results
                lon_node.result = node.result[start_result_idx:next_result_idx]
                start_result_idx = next_result_idx
@@ -0,0 +1 @@
1
+ from .engine import *
@@ -0,0 +1,19 @@
1
+ from typing import List
2
+
3
+ from ..datacube.backends.datacube import Datacube
4
+ from ..datacube.tensor_index_tree import TensorIndexTree
5
+ from ..shapes import ConvexPolytope
6
+
7
+
8
class Engine:
    """Base class for extraction engines: given a datacube and a list of
    polytopes, an engine produces the TensorIndexTree of matching indexes."""

    def __init__(self):
        pass

    def extract(self, datacube: Datacube, polytopes: List[ConvexPolytope]) -> TensorIndexTree:
        """Extract the index tree for `polytopes` from `datacube`.

        Base implementation is a stub (returns None); subclasses override.
        """
        pass

    @staticmethod
    def default():
        """Return the default engine implementation (a HullSlicer).

        Imported lazily inside the method — presumably to avoid a circular
        import; confirm before moving to module level.
        """
        from .hullslicer import HullSlicer

        return HullSlicer()
+ return HullSlicer()