polytope-python 1.0.2__tar.gz → 1.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- polytope-python-1.0.4/MANIFEST.in +1 -0
- {polytope-python-1.0.2/polytope_python.egg-info → polytope-python-1.0.4}/PKG-INFO +1 -1
- polytope-python-1.0.4/polytope/datacube/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/backends/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/backends/datacube.py +162 -0
- polytope-python-1.0.4/polytope/datacube/backends/fdb.py +356 -0
- {polytope-python-1.0.2/polytope/datacube → polytope-python-1.0.4/polytope/datacube/backends}/mock.py +15 -8
- polytope-python-1.0.4/polytope/datacube/backends/xarray.py +140 -0
- polytope-python-1.0.4/polytope/datacube/datacube_axis.py +311 -0
- polytope-python-1.0.4/polytope/datacube/index_tree_pb2.py +27 -0
- polytope-python-1.0.4/polytope/datacube/tensor_index_tree.py +223 -0
- polytope-python-1.0.4/polytope/datacube/transformations/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_cyclic/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_cyclic/datacube_cyclic.py +171 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/datacube_mappers.py +127 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/__init__.py +5 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/healpix.py +135 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py +213 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/local_regular.py +70 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/octahedral.py +2752 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py +1506 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_mappers/mapper_types/regular.py +58 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_merger/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_merger/datacube_merger.py +98 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_reverse/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_reverse/datacube_reverse.py +65 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_transformations.py +97 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_type_change/__init__.py +1 -0
- polytope-python-1.0.4/polytope/datacube/transformations/datacube_type_change/datacube_type_change.py +73 -0
- polytope-python-1.0.4/polytope/datacube/tree_encoding.py +132 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/engine/engine.py +3 -2
- polytope-python-1.0.4/polytope/engine/hullslicer.py +314 -0
- polytope-python-1.0.4/polytope/options.py +71 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/polytope.py +17 -3
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/shapes.py +103 -16
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/utility/combinatorics.py +7 -2
- polytope-python-1.0.4/polytope/utility/geometry.py +26 -0
- polytope-python-1.0.4/polytope/utility/list_tools.py +22 -0
- polytope-python-1.0.4/polytope/version.py +1 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4/polytope_python.egg-info}/PKG-INFO +1 -1
- polytope-python-1.0.4/polytope_python.egg-info/SOURCES.txt +54 -0
- polytope-python-1.0.4/polytope_python.egg-info/requires.txt +8 -0
- polytope-python-1.0.4/pyproject.toml +8 -0
- polytope-python-1.0.4/requirements.txt +8 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/setup.py +3 -0
- polytope-python-1.0.2/polytope/datacube/__init__.py +0 -1
- polytope-python-1.0.2/polytope/datacube/datacube.py +0 -49
- polytope-python-1.0.2/polytope/datacube/datacube_axis.py +0 -464
- polytope-python-1.0.2/polytope/datacube/datacube_request_tree.py +0 -183
- polytope-python-1.0.2/polytope/datacube/xarray.py +0 -150
- polytope-python-1.0.2/polytope/engine/hullslicer.py +0 -162
- polytope-python-1.0.2/polytope/utility/geometry.py +0 -4
- polytope-python-1.0.2/polytope/version.py +0 -1
- polytope-python-1.0.2/polytope_python.egg-info/SOURCES.txt +0 -26
- polytope-python-1.0.2/pyproject.toml +0 -5
- {polytope-python-1.0.2 → polytope-python-1.0.4}/LICENSE +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/__init__.py +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/engine/__init__.py +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/utility/__init__.py +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/utility/exceptions.py +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope/utility/profiling.py +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope_python.egg-info/dependency_links.txt +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope_python.egg-info/not-zip-safe +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/polytope_python.egg-info/top_level.txt +0 -0
- {polytope-python-1.0.2 → polytope-python-1.0.4}/setup.cfg +0 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
include requirements.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .backends.datacube import *
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from ..backends.datacube import *
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from abc import ABC, abstractmethod
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from ...utility.combinatorics import validate_axes
|
|
6
|
+
from ..datacube_axis import DatacubeAxis
|
|
7
|
+
from ..tensor_index_tree import DatacubePath, TensorIndexTree
|
|
8
|
+
from ..transformations.datacube_mappers.datacube_mappers import DatacubeMapper
|
|
9
|
+
from ..transformations.datacube_transformations import (
|
|
10
|
+
DatacubeAxisTransformation,
|
|
11
|
+
has_transform,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Datacube(ABC):
    """Abstract base class for datacube backends (xarray, FDB/GribJump, mock).

    Centralises the axis bookkeeping shared by every backend: which axes
    exist (``_axes``), which are produced by transformations (mapper,
    merge, ...), which are blocked from user requests, and which may be
    compressed together.
    """

    def __init__(self, axis_options=None, compressed_axes_options=None):
        # NOTE: both defaults are None sentinels on purpose. The previous
        # mutable default (compressed_axes_options=[]) was stored on the
        # instance and later mutated via .remove(), so every instance built
        # with the default shared — and corrupted — the same list object.
        if axis_options is None:
            self.axis_options = {}
        else:
            self.axis_options = axis_options
        self.coupled_axes = []  # groups of axes created together by one transformation
        self.axis_counter = 0
        self.complete_axes = []  # axes natively present in the underlying datacube
        self.blocked_axes = []  # axes users may not request directly
        self.fake_axes = []  # axes created by transformations rather than the store
        self.treated_axes = []
        self.nearest_search = {}
        self._axes = None  # mapping of axis name -> DatacubeAxis, built lazily
        self.transformed_axes = []
        self.compressed_grid_axes = []
        self.merged_axes = []
        self.unwanted_path = {}
        self.compressed_axes = [] if compressed_axes_options is None else compressed_axes_options

    @abstractmethod
    def get(self, requests: "TensorIndexTree") -> Any:
        """Return data given a set of request trees"""

    @property
    def axes(self):
        return self._axes

    def validate(self, axes):
        """returns true if the input axes can be resolved against the datacube axes"""
        return validate_axes(list(self.axes.keys()), axes)

    def _create_axes(self, name, values, transformation_type_key, transformation_options):
        # first check what the final axes are for this axis name given transformations
        # NOTE(review): the transformation_options parameter is immediately
        # overwritten below — the option object actually used is the one
        # carried by transformation_type_key. Confirm the parameter is
        # intentionally unused before removing it.
        transformation_options = transformation_type_key
        final_axis_names = DatacubeAxisTransformation.get_final_axes(
            name, transformation_type_key.name, transformation_options
        )
        transformation = DatacubeAxisTransformation.create_transform(
            name, transformation_type_key.name, transformation_options
        )

        # do not compress merged axes
        if transformation_type_key.name == "merge":
            self.merged_axes.append(name)
            self.merged_axes.append(final_axis_names)
            for axis in final_axis_names:
                # remove the merged_axes from the possible compressed axes
                if axis in self.compressed_axes:
                    self.compressed_axes.remove(axis)

        for blocked_axis in transformation.blocked_axes():
            self.blocked_axes.append(blocked_axis)
        if isinstance(transformation, DatacubeMapper):
            # TODO: do we use this?? This shouldn't work for a disk in lat/lon on a octahedral or other grid??
            for compressed_grid_axis in transformation.compressed_grid_axes:
                self.compressed_grid_axes.append(compressed_grid_axis)
        if len(final_axis_names) > 1:
            self.coupled_axes.append(final_axis_names)
            # only the innermost (last) coupled axis may remain compressed
            for axis in final_axis_names:
                if axis in self.compressed_axes and axis != final_axis_names[-1]:
                    self.compressed_axes.remove(axis)
        for axis_name in final_axis_names:
            self.fake_axes.append(axis_name)
            # if axis does not yet exist, create it

            # first need to change the values so that we have right type
            values = transformation.change_val_type(axis_name, values)
            if self._axes is None or axis_name not in self._axes.keys():
                DatacubeAxis.create_standard(axis_name, values, self)
            # add transformation tag to axis, as well as transformation options for later
            setattr(self._axes[axis_name], has_transform[transformation_type_key.name], True)  # where has_transform is
            # a factory inside datacube_transformations to set the has_transform, is_cyclic etc axis properties
            # add the specific transformation handled here to the relevant axes
            # Modify the axis to update with the tag

            if transformation not in self._axes[axis_name].transformations:  # Avoids duplicates being stored
                self._axes[axis_name].transformations.append(transformation)

    def _add_all_transformation_axes(self, options, name, values):
        """Create all the axes implied by the transformations configured for *name*."""
        for transformation_type_key in options.transformations:
            if transformation_type_key != "cyclic":
                self.transformed_axes.append(name)
            self._create_axes(name, values, transformation_type_key, options)

    def _check_and_add_axes(self, options, name, values):
        """Create the axis *name* (plus any transformation-derived axes) if not already present."""
        if options is not None:
            self._add_all_transformation_axes(options, name, values)
        else:
            if name not in self.blocked_axes:
                if self._axes is None or name not in self._axes.keys():
                    DatacubeAxis.create_standard(name, values, self)

    def has_index(self, path: "DatacubePath", axis, index):
        "Given a path to a subset of the datacube, checks if the index exists on that sub-datacube axis"
        path = self.fit_path(path)
        indexes = axis.find_indexes(path, self)
        return index in indexes

    def fit_path(self, path):
        """Drop path keys that are neither complete nor transformation axes.

        Mutates *path* in place and returns it. Iterate over a snapshot of
        the keys: popping from a dict while iterating its live keys view
        raises ``RuntimeError: dictionary changed size during iteration``.
        """
        for key in list(path.keys()):
            if key not in self.complete_axes and key not in self.fake_axes:
                path.pop(key)
        return path

    def get_indices(self, path: "DatacubePath", axis, lower, upper, method=None):
        """
        Given a path to a subset of the datacube, return the discrete indexes which exist between
        two non-discrete values (lower, upper) for a particular axis (given by label)
        If lower and upper are equal, returns the index which exactly matches that value (if it exists)
        e.g. returns integer discrete points between two floats
        """
        path = self.fit_path(path)
        indexes = axis.find_indexes(path, self)
        idx_between = axis.find_indices_between(indexes, lower, upper, self, method)

        logging.info(f"For axis {axis.name} between {lower} and {upper}, found indices {idx_between}")

        return idx_between

    def get_mapper(self, axis):
        """
        Get the type mapper for a subaxis of the datacube given by label
        """
        return self._axes[axis]

    def remap_path(self, path: "DatacubePath"):
        """Remap every value on the path into its axis range (e.g. cyclic wrap-around)."""
        for key in path:
            value = path[key]
            path[key] = self._axes[key].remap([value, value])[0][0]
        return path

    @staticmethod
    def create(datacube, config=None, axis_options=None, compressed_axes_options=None, alternative_axes=None):
        """Factory: build the backend matching the type of *datacube*.

        Returns an XArrayDatacube for a DataArray, an FDBDatacube for a
        GribJump handle, and None for anything else. None sentinels replace
        the previous mutable defaults ({} / []), which were shared across
        calls and mutated downstream.
        """
        config = {} if config is None else config
        axis_options = {} if axis_options is None else axis_options
        compressed_axes_options = [] if compressed_axes_options is None else compressed_axes_options
        alternative_axes = [] if alternative_axes is None else alternative_axes
        if type(datacube).__name__ == "DataArray":
            from .xarray import XArrayDatacube

            xadatacube = XArrayDatacube(datacube, axis_options, compressed_axes_options)
            return xadatacube
        if type(datacube).__name__ == "GribJump":
            from .fdb import FDBDatacube

            fdbdatacube = FDBDatacube(datacube, config, axis_options, compressed_axes_options, alternative_axes)
            return fdbdatacube

    def check_branching_axes(self, request):
        """Hook for backends that prune axes based on the request; no-op by default."""
        pass
|
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import operator
|
|
3
|
+
from copy import deepcopy
|
|
4
|
+
from itertools import product
|
|
5
|
+
|
|
6
|
+
from ...utility.geometry import nearest_pt
|
|
7
|
+
from .datacube import Datacube, TensorIndexTree
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class FDBDatacube(Datacube):
    """Datacube backend over an FDB store accessed through GribJump.

    Axes and their values are discovered by calling ``gj.axes`` on a partial
    request (or taken verbatim from ``alternative_axes``); leaf data is then
    fetched in bulk with ``gj.extract`` and attached to the request-tree
    nodes by :meth:`get`.
    """

    def __init__(self, gj, config=None, axis_options=None, compressed_axes_options=None, alternative_axes=None):
        # None sentinels replace the previous mutable defaults ([]): the
        # compressed-axes list is stored and mutated by the base class, so a
        # shared default list would leak state between instances.
        if config is None:
            config = {}
        if compressed_axes_options is None:
            compressed_axes_options = []
        if alternative_axes is None:
            alternative_axes = []

        super().__init__(axis_options, compressed_axes_options)

        logging.info("Created an FDB datacube with options: " + str(axis_options))

        self.unwanted_path = {}
        self.axis_options = axis_options

        partial_request = config
        # Find values in the level 3 FDB datacube

        self.gj = gj
        if len(alternative_axes) == 0:
            # Ask GribJump which axes/values exist for the partial request
            self.fdb_coordinates = self.gj.axes(partial_request)
        else:
            # Caller supplied the axes explicitly; skip the GribJump call
            self.fdb_coordinates = {}
            for axis_config in alternative_axes:
                self.fdb_coordinates[axis_config.axis_name] = axis_config.values

        logging.info("Axes returned from GribJump are: " + str(self.fdb_coordinates))

        self.fdb_coordinates["values"] = []
        for name, values in self.fdb_coordinates.items():
            values.sort()
            options = None
            for opt in self.axis_options:
                if opt.axis_name == name:
                    options = opt

            self._check_and_add_axes(options, name, values)
            self.treated_axes.append(name)
            self.complete_axes.append(name)

        # add other options to axis which were just created above like "lat" for the mapper transformations for eg
        for name in self._axes:
            if name not in self.treated_axes:
                options = None
                for opt in self.axis_options:
                    if opt.axis_name == name:
                        options = opt

                val = self._axes[name].type
                self._check_and_add_axes(options, name, val)

        logging.info("Polytope created axes for: " + str(self._axes.keys()))

    def check_branching_axes(self, request):
        """Drop axes that cannot apply to this request (e.g. levelist for surface data)."""
        polytopes = request.polytopes()
        for polytope in polytopes:
            for ax in polytope._axes:
                if ax == "levtype":
                    (upper, lower, idx) = polytope.extents(ax)
                    if "sfc" in polytope.points[idx]:
                        self.fdb_coordinates.pop("levelist", None)
                        self.fdb_coordinates.pop("quantile", None)
                        # TODO: When do these not appear??
                        self.fdb_coordinates.pop("direction", None)
                        self.fdb_coordinates.pop("frequency", None)

        # NOTE: verify that we also remove the axis object for axes we've removed here
        axes_to_remove = set(self.complete_axes) - set(self.fdb_coordinates.keys())

        # Remove the keys from self._axes
        for axis_name in axes_to_remove:
            self._axes.pop(axis_name, None)

    def get(self, requests: "TensorIndexTree"):
        """Fetch data for every leaf of *requests* from GribJump and attach the results in place."""
        if len(requests.children) == 0:
            return requests
        fdb_requests = []
        fdb_requests_decoding_info = []
        self.get_fdb_requests(requests, fdb_requests, fdb_requests_decoding_info)

        # TODO: here, loop through the fdb requests and request from gj and directly add to the nodes
        complete_list_complete_uncompressed_requests = []
        complete_fdb_decoding_info = []
        for j, compressed_request in enumerate(fdb_requests):
            # TODO: can we do gj extract outside of this loop?
            uncompressed_request = {}

            # Need to determine the possible decompressed requests

            # find the possible combinations of compressed indices
            interm_branch_tuple_values = []
            for key in compressed_request[0].keys():
                interm_branch_tuple_values.append(compressed_request[0][key])
            request_combis = product(*interm_branch_tuple_values)

            # Need to extract the possible requests and add them to the right nodes
            for combi in request_combis:
                uncompressed_request = {}
                for i, key in enumerate(compressed_request[0].keys()):
                    uncompressed_request[key] = combi[i]
                complete_uncompressed_request = (uncompressed_request, compressed_request[1])
                complete_list_complete_uncompressed_requests.append(complete_uncompressed_request)
                complete_fdb_decoding_info.append(fdb_requests_decoding_info[j])
        output_values = self.gj.extract(complete_list_complete_uncompressed_requests)
        self.assign_fdb_output_to_nodes(output_values, complete_fdb_decoding_info)

    def get_fdb_requests(
        self,
        requests: "TensorIndexTree",
        fdb_requests=None,
        fdb_requests_decoding_info=None,
        leaf_path=None,
    ):
        """Walk the request tree, accumulating (path, ranges) pairs into *fdb_requests*.

        The accumulator defaults are None sentinels: the previous mutable
        defaults ([]) were appended to, so repeated default-argument calls
        would have accumulated stale requests across invocations.
        """
        if fdb_requests is None:
            fdb_requests = []
        if fdb_requests_decoding_info is None:
            fdb_requests_decoding_info = []
        if leaf_path is None:
            leaf_path = {}

        # First when request node is root, go to its children
        if requests.axis.name == "root":
            logging.info("Looking for data for the tree: " + str([leaf.flatten() for leaf in requests.leaves]))

            for c in requests.children:
                self.get_fdb_requests(c, fdb_requests, fdb_requests_decoding_info)
        # If request node has no children, we have a leaf so need to assign fdb values to it
        else:
            key_value_path = {requests.axis.name: requests.values}
            ax = requests.axis
            (key_value_path, leaf_path, self.unwanted_path) = ax.unmap_path_key(
                key_value_path, leaf_path, self.unwanted_path
            )
            leaf_path.update(key_value_path)
            if len(requests.children[0].children[0].children) == 0:
                # find the fdb_requests and associated nodes to which to add results
                (path, current_start_idxs, fdb_node_ranges, lat_length) = self.get_2nd_last_values(requests, leaf_path)
                (
                    original_indices,
                    sorted_request_ranges,
                    fdb_node_ranges,
                ) = self.sort_fdb_request_ranges(current_start_idxs, lat_length, fdb_node_ranges)
                fdb_requests.append((path, sorted_request_ranges))
                fdb_requests_decoding_info.append((original_indices, fdb_node_ranges))

            # Otherwise remap the path for this key and iterate again over children
            else:
                for c in requests.children:
                    self.get_fdb_requests(c, fdb_requests, fdb_requests_decoding_info, leaf_path)

    def remove_duplicates_in_request_ranges(self, fdb_node_ranges, current_start_idxs):
        """Deduplicate start indices across sub-requests, pruning now-empty tree nodes.

        NOTE(review): pops from current_start_idxs[i] while enumerating it —
        confirm at most one sub-list can become empty per pass.
        """
        seen_indices = set()
        for i, idxs_list in enumerate(current_start_idxs):
            for k, sub_lat_idxs in enumerate(idxs_list):
                actual_fdb_node = fdb_node_ranges[i][k]
                original_fdb_node_range_vals = []
                new_current_start_idx = []
                for j, idx in enumerate(sub_lat_idxs):
                    if idx not in seen_indices:
                        # TODO: need to remove it from the values in the corresponding tree node
                        # TODO: need to readjust the range we give to gj ... DONE?
                        original_fdb_node_range_vals.append(actual_fdb_node[0].values[j])
                        seen_indices.add(idx)
                        new_current_start_idx.append(idx)
                if original_fdb_node_range_vals != []:
                    actual_fdb_node[0].values = tuple(original_fdb_node_range_vals)
                else:
                    # there are no values on this node anymore so can remove it
                    actual_fdb_node[0].remove_branch()
                if len(new_current_start_idx) == 0:
                    current_start_idxs[i].pop(k)
                else:
                    current_start_idxs[i][k] = new_current_start_idx
        return (fdb_node_ranges, current_start_idxs)

    def nearest_lat_lon_search(self, requests):
        """If a nearest-point search is configured, prune the last two tree layers
        down to the lat/lon branches closest to the requested points."""
        if len(self.nearest_search) != 0:
            first_ax_name = requests.children[0].axis.name
            second_ax_name = requests.children[0].children[0].axis.name

            if first_ax_name not in self.nearest_search.keys() or second_ax_name not in self.nearest_search.keys():
                raise Exception("nearest point search axes are wrong")

            second_ax = requests.children[0].children[0].axis

            # TODO: actually, here we should not remap the nearest_pts, we should instead unmap the
            # found_latlon_pts and then remap them later once we have compared found_latlon_pts and nearest_pts
            nearest_pts = [
                [lat_val, second_ax._remap_val_to_axis_range(lon_val)]
                for (lat_val, lon_val) in zip(
                    self.nearest_search[first_ax_name][0], self.nearest_search[second_ax_name][0]
                )
            ]

            found_latlon_pts = []
            for lat_child in requests.children:
                for lon_child in lat_child.children:
                    found_latlon_pts.append([lat_child.values, lon_child.values])

            # now find the nearest lat lon to the points requested
            nearest_latlons = []
            for pt in nearest_pts:
                nearest_latlon = nearest_pt(found_latlon_pts, pt)
                nearest_latlons.append(nearest_latlon)

            # need to remove the branches that do not fit
            lat_children_values = [child.values for child in requests.children]
            for i in range(len(lat_children_values)):
                lat_child_val = lat_children_values[i]
                lat_child = [child for child in requests.children if child.values == lat_child_val][0]
                if lat_child.values not in [(latlon[0],) for latlon in nearest_latlons]:
                    lat_child.remove_branch()
                else:
                    possible_lons = [latlon[1] for latlon in nearest_latlons if (latlon[0],) == lat_child.values]
                    lon_children_values = [child.values for child in lat_child.children]
                    for j in range(len(lon_children_values)):
                        lon_child_val = lon_children_values[j]
                        lon_child = [child for child in lat_child.children if child.values == lon_child_val][0]
                        for value in lon_child.values:
                            if value not in possible_lons:
                                lon_child.remove_compressed_branch(value)

    def get_2nd_last_values(self, requests, leaf_path=None):
        """Collect, for the second-to-last tree layer, the start indices and nodes
        each fdb request range should decode into."""
        if leaf_path is None:
            leaf_path = {}
        # In this function, we recursively loop over the last two layers of the tree and store the indices of the
        # request ranges in those layers
        self.nearest_lat_lon_search(requests)

        lat_length = len(requests.children)
        current_start_idxs = [False] * lat_length
        fdb_node_ranges = [False] * lat_length
        for i in range(len(requests.children)):
            lat_child = requests.children[i]
            lon_length = len(lat_child.children)
            current_start_idxs[i] = [None] * lon_length
            fdb_node_ranges[i] = [[TensorIndexTree.root for y in range(lon_length)] for x in range(lon_length)]
            current_start_idx = deepcopy(current_start_idxs[i])
            fdb_range_nodes = deepcopy(fdb_node_ranges[i])
            key_value_path = {lat_child.axis.name: lat_child.values}
            ax = lat_child.axis
            (key_value_path, leaf_path, self.unwanted_path) = ax.unmap_path_key(
                key_value_path, leaf_path, self.unwanted_path
            )
            leaf_path.update(key_value_path)
            (current_start_idxs[i], fdb_node_ranges[i]) = self.get_last_layer_before_leaf(
                lat_child, leaf_path, current_start_idx, fdb_range_nodes
            )

        leaf_path_copy = deepcopy(leaf_path)
        leaf_path_copy.pop("values", None)
        return (leaf_path_copy, current_start_idxs, fdb_node_ranges, lat_length)

    def get_last_layer_before_leaf(self, requests, leaf_path, current_idx, fdb_range_n):
        """Unmap the leaves of *requests* and return their value indices and nodes.

        NOTE(review): current_idx and fdb_range_n are rebound immediately, so
        the passed-in arguments are effectively unused — confirm before relying
        on them as in/out parameters.
        """
        current_idx = [[] for i in range(len(requests.children))]
        fdb_range_n = [[] for i in range(len(requests.children))]
        for i, c in enumerate(requests.children):
            # now c are the leaves of the initial tree
            key_value_path = {c.axis.name: c.values}
            ax = c.axis
            (key_value_path, leaf_path, self.unwanted_path) = ax.unmap_path_key(
                key_value_path, leaf_path, self.unwanted_path
            )
            # TODO: change this to accommodate non consecutive indexes being compressed too
            current_idx[i].extend(key_value_path["values"])
            fdb_range_n[i].append(c)
        return (current_idx, fdb_range_n)

    def assign_fdb_output_to_nodes(self, output_values, fdb_requests_decoding_info):
        """Copy GribJump extraction results back onto the matching tree nodes."""
        for k in range(len(output_values)):
            request_output_values = output_values[k]
            (
                original_indices,
                fdb_node_ranges,
            ) = fdb_requests_decoding_info[k]
            sorted_fdb_range_nodes = [fdb_node_ranges[i] for i in original_indices]
            for i in range(len(sorted_fdb_range_nodes)):
                n = sorted_fdb_range_nodes[i][0]
                if len(request_output_values[0]) == 0:
                    # If we are here, no data was found for this path in the fdb
                    none_array = [None] * len(n.values)
                    n.result.extend(none_array)
                else:
                    interm_request_output_values = request_output_values[0][i][0]
                    n.result.extend(interm_request_output_values)

    def sort_fdb_request_ranges(self, current_start_idx, lat_length, fdb_node_ranges):
        """Turn per-node start indices into sorted contiguous (start, end) ranges for GribJump."""
        (new_fdb_node_ranges, new_current_start_idx) = self.remove_duplicates_in_request_ranges(
            fdb_node_ranges, current_start_idx
        )
        interm_request_ranges = []
        # TODO: modify the start indexes to have as many arrays as the request ranges
        # NOTE(review): the deduplicated results above are discarded here
        # (new_fdb_node_ranges is rebound, new_current_start_idx unused) —
        # the dedup call is kept for its in-place side effects only.
        new_fdb_node_ranges = []
        for i in range(lat_length):
            interm_fdb_nodes = fdb_node_ranges[i]
            old_interm_start_idx = current_start_idx[i]
            for j in range(len(old_interm_start_idx)):
                # TODO: if we sorted the cyclic values in increasing order on the tree too,
                # then we wouldn't have to sort here?
                sorted_list = sorted(enumerate(old_interm_start_idx[j]), key=lambda x: x[1])
                original_indices_idx, interm_start_idx = zip(*sorted_list)
                for interm_fdb_nodes_obj in interm_fdb_nodes[j]:
                    interm_fdb_nodes_obj.values = tuple([interm_fdb_nodes_obj.values[k] for k in original_indices_idx])
                if abs(interm_start_idx[-1] + 1 - interm_start_idx[0]) <= len(interm_start_idx):
                    # indices are contiguous: a single range covers them
                    current_request_ranges = (interm_start_idx[0], interm_start_idx[-1] + 1)
                    interm_request_ranges.append(current_request_ranges)
                    new_fdb_node_ranges.append(interm_fdb_nodes[j])
                else:
                    # indices have gaps: split into one range per contiguous run
                    jumps = list(map(operator.sub, interm_start_idx[1:], interm_start_idx[:-1]))
                    last_idx = 0
                    for k, jump in enumerate(jumps):
                        if jump > 1:
                            current_request_ranges = (interm_start_idx[last_idx], interm_start_idx[k] + 1)
                            new_fdb_node_ranges.append(interm_fdb_nodes[j])
                            last_idx = k + 1
                            interm_request_ranges.append(current_request_ranges)
                        if k == len(interm_start_idx) - 2:
                            current_request_ranges = (interm_start_idx[last_idx], interm_start_idx[-1] + 1)
                            interm_request_ranges.append(current_request_ranges)
                            new_fdb_node_ranges.append(interm_fdb_nodes[j])
        request_ranges_with_idx = list(enumerate(interm_request_ranges))
        sorted_list = sorted(request_ranges_with_idx, key=lambda x: x[1][0])
        original_indices, sorted_request_ranges = zip(*sorted_list)
        return (original_indices, sorted_request_ranges, new_fdb_node_ranges)

    def datacube_natural_indexes(self, axis, subarray):
        """Return the native index values stored for *axis*, or None if absent."""
        indexes = subarray.get(axis.name, None)
        return indexes

    def select(self, path, unmapped_path):
        """Return the full coordinate mapping; FDB paths need no sub-selection."""
        return self.fdb_coordinates

    def ax_vals(self, name):
        """Return the values known for axis *name*, or None if the axis is absent."""
        return self.fdb_coordinates.get(name, None)

    def prep_tree_encoding(self, node, unwanted_path=None):
        # TODO: prepare the tree for protobuf encoding
        # ie transform all axes for gribjump and adding the index property on the leaves
        if unwanted_path is None:
            unwanted_path = {}

        ax = node.axis
        (new_node, unwanted_path) = ax.unmap_tree_node(node, unwanted_path)

        if len(node.children) != 0:
            for c in new_node.children:
                self.prep_tree_encoding(c, unwanted_path)

    def prep_tree_decoding(self, tree):
        # TODO: transform the tree after decoding from protobuf
        # ie unstransform all axes from gribjump and put the indexes back as a leaf/extra node
        pass
|
{polytope-python-1.0.2/polytope/datacube → polytope-python-1.0.4/polytope/datacube/backends}/mock.py
RENAMED
|
@@ -1,20 +1,21 @@
|
|
|
1
1
|
import math
|
|
2
2
|
from copy import deepcopy
|
|
3
3
|
|
|
4
|
-
from
|
|
5
|
-
from
|
|
6
|
-
from .
|
|
4
|
+
from ...utility.combinatorics import validate_axes
|
|
5
|
+
from ..datacube_axis import IntDatacubeAxis
|
|
6
|
+
from .datacube import Datacube, DatacubePath, TensorIndexTree
|
|
7
7
|
|
|
8
8
|
|
|
9
9
|
class MockDatacube(Datacube):
|
|
10
|
-
def __init__(self, dimensions):
|
|
10
|
+
def __init__(self, dimensions, compressed_axes_options=[]):
|
|
11
|
+
super().__init__({}, compressed_axes_options)
|
|
11
12
|
assert isinstance(dimensions, dict)
|
|
12
13
|
|
|
13
14
|
self.dimensions = dimensions
|
|
14
15
|
|
|
15
16
|
self.mappers = {}
|
|
16
17
|
for name in self.dimensions:
|
|
17
|
-
self.mappers[name] = deepcopy(
|
|
18
|
+
self.mappers[name] = deepcopy(IntDatacubeAxis())
|
|
18
19
|
self.mappers[name].name = name
|
|
19
20
|
|
|
20
21
|
self.stride = {}
|
|
@@ -23,14 +24,14 @@ class MockDatacube(Datacube):
|
|
|
23
24
|
self.stride[k] = stride_cumulative
|
|
24
25
|
stride_cumulative *= self.dimensions[k]
|
|
25
26
|
|
|
26
|
-
def get(self, requests:
|
|
27
|
+
def get(self, requests: TensorIndexTree):
|
|
27
28
|
# Takes in a datacube and verifies the leaves of the tree are complete
|
|
28
29
|
# (ie it found values for all datacube axis)
|
|
29
30
|
|
|
30
31
|
for r in requests.leaves:
|
|
31
32
|
path = r.flatten()
|
|
32
33
|
if len(path.items()) == len(self.dimensions.items()):
|
|
33
|
-
result = 0
|
|
34
|
+
result = (0,)
|
|
34
35
|
for k, v in path.items():
|
|
35
36
|
result += v * self.stride[k]
|
|
36
37
|
|
|
@@ -41,7 +42,7 @@ class MockDatacube(Datacube):
|
|
|
41
42
|
def get_mapper(self, axis):
|
|
42
43
|
return self.mappers[axis]
|
|
43
44
|
|
|
44
|
-
def get_indices(self, path: DatacubePath, axis, lower, upper):
|
|
45
|
+
def get_indices(self, path: DatacubePath, axis, lower, upper, method=None):
|
|
45
46
|
if lower == upper == math.ceil(lower):
|
|
46
47
|
if lower >= 0:
|
|
47
48
|
return [int(lower)]
|
|
@@ -60,3 +61,9 @@ class MockDatacube(Datacube):
|
|
|
60
61
|
|
|
61
62
|
def validate(self, axes):
|
|
62
63
|
return validate_axes(self.axes, axes)
|
|
64
|
+
|
|
65
|
+
def ax_vals(self, name):
|
|
66
|
+
return []
|
|
67
|
+
|
|
68
|
+
def _find_indexes_between(self, axis, indexes, low, up):
|
|
69
|
+
pass
|