polytope-python 1.0.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- polytope_feature/__init__.py +1 -0
- polytope_feature/datacube/__init__.py +1 -0
- polytope_feature/datacube/backends/__init__.py +1 -0
- polytope_feature/datacube/backends/datacube.py +171 -0
- polytope_feature/datacube/backends/fdb.py +399 -0
- polytope_feature/datacube/backends/mock.py +71 -0
- polytope_feature/datacube/backends/xarray.py +142 -0
- polytope_feature/datacube/datacube_axis.py +332 -0
- polytope_feature/datacube/index_tree_pb2.py +27 -0
- polytope_feature/datacube/tensor_index_tree.py +228 -0
- polytope_feature/datacube/transformations/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_cyclic/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_cyclic/datacube_cyclic.py +171 -0
- polytope_feature/datacube/transformations/datacube_mappers/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_mappers/datacube_mappers.py +141 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/__init__.py +5 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix.py +147 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py +229 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/local_regular.py +95 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/octahedral.py +7896 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_gaussian.py +1459 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py +5128 -0
- polytope_feature/datacube/transformations/datacube_mappers/mapper_types/regular.py +75 -0
- polytope_feature/datacube/transformations/datacube_merger/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_merger/datacube_merger.py +95 -0
- polytope_feature/datacube/transformations/datacube_reverse/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_reverse/datacube_reverse.py +65 -0
- polytope_feature/datacube/transformations/datacube_transformations.py +96 -0
- polytope_feature/datacube/transformations/datacube_type_change/__init__.py +1 -0
- polytope_feature/datacube/transformations/datacube_type_change/datacube_type_change.py +124 -0
- polytope_feature/datacube/tree_encoding.py +132 -0
- polytope_feature/engine/__init__.py +1 -0
- polytope_feature/engine/engine.py +19 -0
- polytope_feature/engine/hullslicer.py +316 -0
- polytope_feature/options.py +77 -0
- polytope_feature/polytope.py +71 -0
- polytope_feature/shapes.py +405 -0
- polytope_feature/utility/__init__.py +0 -0
- polytope_feature/utility/combinatorics.py +48 -0
- polytope_feature/utility/exceptions.py +45 -0
- polytope_feature/utility/geometry.py +26 -0
- polytope_feature/utility/list_tools.py +41 -0
- polytope_feature/utility/profiling.py +14 -0
- polytope_feature/version.py +1 -0
- polytope_python-1.0.31.dist-info/LICENSE +201 -0
- polytope_python-1.0.31.dist-info/METADATA +21 -0
- polytope_python-1.0.31.dist-info/RECORD +49 -0
- polytope_python-1.0.31.dist-info/WHEEL +5 -0
- polytope_python-1.0.31.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
import math
|
|
2
|
+
from copy import copy
|
|
3
|
+
from itertools import chain
|
|
4
|
+
from typing import List
|
|
5
|
+
|
|
6
|
+
import scipy.spatial
|
|
7
|
+
|
|
8
|
+
from ..datacube.backends.datacube import Datacube
|
|
9
|
+
from ..datacube.datacube_axis import UnsliceableDatacubeAxis
|
|
10
|
+
from ..datacube.tensor_index_tree import TensorIndexTree
|
|
11
|
+
from ..shapes import ConvexPolytope
|
|
12
|
+
from ..utility.combinatorics import group, tensor_product
|
|
13
|
+
from ..utility.exceptions import UnsliceableShapeError
|
|
14
|
+
from ..utility.geometry import lerp
|
|
15
|
+
from ..utility.list_tools import argmax, argmin, unique
|
|
16
|
+
from .engine import Engine
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class HullSlicer(Engine):
    """Slicing engine that extracts datacube indices by recursively slicing
    convex polytopes along each datacube axis (convex-hull based)."""

    def __init__(self):
        # Per-axis-name cache: True if the axis is an UnsliceableDatacubeAxis.
        self.ax_is_unsliceable = {}
        # Memoised index lookups, keyed by (flattened path tuple, axis name, bounds/value...).
        self.axis_values_between = {}
        # NOTE(review): never read or written elsewhere in this file -- possibly dead.
        self.has_value = {}
        # NOTE(review): never read or written elsewhere in this file -- possibly dead.
        self.sliced_polytopes = {}
        # Memoised cyclic-remap/rounding results, keyed by (value, axis name).
        self.remapped_vals = {}
        # Names of axes whose values may be compressed onto a single tree node.
        self.compressed_axes = []
|
|
27
|
+
|
|
28
|
+
    def _unique_continuous_points(self, p: ConvexPolytope, datacube: Datacube):
        """Convert the polytope's vertex coordinates to floats, axis by axis,
        and deduplicate the points; also populates the ax_is_unsliceable cache.

        Mutates ``p.points`` in place.
        """
        for i, ax in enumerate(p._axes):
            mapper = datacube.get_mapper(ax)
            if self.ax_is_unsliceable.get(ax, None) is None:
                self.ax_is_unsliceable[ax] = isinstance(mapper, UnsliceableDatacubeAxis)
            if self.ax_is_unsliceable[ax]:
                # NOTE(review): `break` stops converting ALL remaining axes, not just
                # this one -- presumably unsliceable axes cannot be float-converted and
                # later axes are unaffected in practice; confirm against axis ordering.
                break
            for j, val in enumerate(p.points):
                p.points[j][i] = mapper.to_float(mapper.parse(p.points[j][i]))
        # Remove duplicate points
        unique(p.points)
|
|
39
|
+
|
|
40
|
+
def _build_unsliceable_child(self, polytope, ax, node, datacube, lowers, next_nodes, slice_axis_idx):
|
|
41
|
+
if not polytope.is_flat:
|
|
42
|
+
raise UnsliceableShapeError(ax)
|
|
43
|
+
path = node.flatten()
|
|
44
|
+
|
|
45
|
+
# all unsliceable children are natively 1D so can group them together in a tuple...
|
|
46
|
+
flattened_tuple = tuple()
|
|
47
|
+
if len(datacube.coupled_axes) > 0:
|
|
48
|
+
if path.get(datacube.coupled_axes[0][0], None) is not None:
|
|
49
|
+
flattened_tuple = (datacube.coupled_axes[0][0], path.get(datacube.coupled_axes[0][0], None))
|
|
50
|
+
path = {flattened_tuple[0]: flattened_tuple[1]}
|
|
51
|
+
|
|
52
|
+
for i, lower in enumerate(lowers):
|
|
53
|
+
if self.axis_values_between.get((flattened_tuple, ax.name, lower), None) is None:
|
|
54
|
+
self.axis_values_between[(flattened_tuple, ax.name, lower)] = datacube.has_index(path, ax, lower)
|
|
55
|
+
datacube_has_index = self.axis_values_between[(flattened_tuple, ax.name, lower)]
|
|
56
|
+
|
|
57
|
+
if datacube_has_index:
|
|
58
|
+
if i == 0:
|
|
59
|
+
(child, next_nodes) = node.create_child(ax, lower, next_nodes)
|
|
60
|
+
child["unsliced_polytopes"] = copy(node["unsliced_polytopes"])
|
|
61
|
+
child["unsliced_polytopes"].remove(polytope)
|
|
62
|
+
next_nodes.append(child)
|
|
63
|
+
else:
|
|
64
|
+
child.add_value(lower)
|
|
65
|
+
else:
|
|
66
|
+
# raise a value not found error
|
|
67
|
+
errmsg = (
|
|
68
|
+
f"Datacube does not have expected index {lower} of type {type(lower)}"
|
|
69
|
+
f"on {ax.name} along the path {path}"
|
|
70
|
+
)
|
|
71
|
+
raise ValueError(errmsg)
|
|
72
|
+
|
|
73
|
+
def find_values_between(self, polytope, ax, node, datacube, lower, upper):
|
|
74
|
+
tol = ax.tol
|
|
75
|
+
lower = ax.from_float(lower - tol)
|
|
76
|
+
upper = ax.from_float(upper + tol)
|
|
77
|
+
flattened = node.flatten()
|
|
78
|
+
method = polytope.method
|
|
79
|
+
if method == "nearest":
|
|
80
|
+
datacube.nearest_search[ax.name] = polytope.points
|
|
81
|
+
|
|
82
|
+
# NOTE: caching
|
|
83
|
+
# Create a coupled_axes list inside of datacube and add to it during axis formation, then here
|
|
84
|
+
# do something like if ax is in second place of coupled_axes, then take the flattened part of the array that
|
|
85
|
+
# corresponds to the first place of cooupled_axes in the hashing
|
|
86
|
+
# Else, if we do not need the flattened bit in the hash, can just put an empty string instead?
|
|
87
|
+
|
|
88
|
+
flattened_tuple = tuple()
|
|
89
|
+
if len(datacube.coupled_axes) > 0:
|
|
90
|
+
if flattened.get(datacube.coupled_axes[0][0], None) is not None:
|
|
91
|
+
flattened_tuple = (datacube.coupled_axes[0][0], flattened.get(datacube.coupled_axes[0][0], None))
|
|
92
|
+
flattened = {flattened_tuple[0]: flattened_tuple[1]}
|
|
93
|
+
|
|
94
|
+
values = self.axis_values_between.get((flattened_tuple, ax.name, lower, upper, method), None)
|
|
95
|
+
if values is None:
|
|
96
|
+
values = datacube.get_indices(flattened, ax, lower, upper, method)
|
|
97
|
+
self.axis_values_between[(flattened_tuple, ax.name, lower, upper, method)] = values
|
|
98
|
+
return values
|
|
99
|
+
|
|
100
|
+
def remap_values(self, ax, value):
|
|
101
|
+
remapped_val = self.remapped_vals.get((value, ax.name), None)
|
|
102
|
+
if remapped_val is None:
|
|
103
|
+
remapped_val = value
|
|
104
|
+
if ax.is_cyclic:
|
|
105
|
+
remapped_val_interm = ax.remap([value, value])[0]
|
|
106
|
+
remapped_val = (remapped_val_interm[0] + remapped_val_interm[1]) / 2
|
|
107
|
+
if ax.can_round:
|
|
108
|
+
remapped_val = round(remapped_val, int(-math.log10(ax.tol)))
|
|
109
|
+
self.remapped_vals[(value, ax.name)] = remapped_val
|
|
110
|
+
return remapped_val
|
|
111
|
+
|
|
112
|
+
def _build_sliceable_child(self, polytope, ax, node, datacube, values, next_nodes, slice_axis_idx):
|
|
113
|
+
for i, value in enumerate(values):
|
|
114
|
+
if i == 0 or ax.name not in self.compressed_axes:
|
|
115
|
+
fvalue = ax.to_float(value)
|
|
116
|
+
new_polytope = slice(polytope, ax.name, fvalue, slice_axis_idx)
|
|
117
|
+
remapped_val = self.remap_values(ax, value)
|
|
118
|
+
(child, next_nodes) = node.create_child(ax, remapped_val, next_nodes)
|
|
119
|
+
child["unsliced_polytopes"] = copy(node["unsliced_polytopes"])
|
|
120
|
+
child["unsliced_polytopes"].remove(polytope)
|
|
121
|
+
if new_polytope is not None:
|
|
122
|
+
child["unsliced_polytopes"].add(new_polytope)
|
|
123
|
+
next_nodes.append(child)
|
|
124
|
+
else:
|
|
125
|
+
remapped_val = self.remap_values(ax, value)
|
|
126
|
+
child.add_value(remapped_val)
|
|
127
|
+
|
|
128
|
+
def _build_branch(self, ax, node, datacube, next_nodes):
|
|
129
|
+
if ax.name not in self.compressed_axes:
|
|
130
|
+
parent_node = node.parent
|
|
131
|
+
right_unsliced_polytopes = []
|
|
132
|
+
for polytope in node["unsliced_polytopes"]:
|
|
133
|
+
if ax.name in polytope._axes:
|
|
134
|
+
right_unsliced_polytopes.append(polytope)
|
|
135
|
+
for i, polytope in enumerate(right_unsliced_polytopes):
|
|
136
|
+
node._parent = parent_node
|
|
137
|
+
lower, upper, slice_axis_idx = polytope.extents(ax.name)
|
|
138
|
+
# here, first check if the axis is an unsliceable axis and directly build node if it is
|
|
139
|
+
# NOTE: we should have already created the ax_is_unsliceable cache before
|
|
140
|
+
if self.ax_is_unsliceable[ax.name]:
|
|
141
|
+
self._build_unsliceable_child(polytope, ax, node, datacube, [lower], next_nodes, slice_axis_idx)
|
|
142
|
+
else:
|
|
143
|
+
values = self.find_values_between(polytope, ax, node, datacube, lower, upper)
|
|
144
|
+
# NOTE: need to only remove the branches if the values are empty,
|
|
145
|
+
# but only if there are no other possible children left in the tree that
|
|
146
|
+
# we can append and if somehow this happens before and we need to remove, then what do we do??
|
|
147
|
+
if i == len(right_unsliced_polytopes) - 1:
|
|
148
|
+
# we have iterated all polytopes and we can now remove the node if we need to
|
|
149
|
+
if len(values) == 0 and len(node.children) == 0:
|
|
150
|
+
node.remove_branch()
|
|
151
|
+
self._build_sliceable_child(polytope, ax, node, datacube, values, next_nodes, slice_axis_idx)
|
|
152
|
+
else:
|
|
153
|
+
all_values = []
|
|
154
|
+
all_lowers = []
|
|
155
|
+
first_polytope = False
|
|
156
|
+
first_slice_axis_idx = False
|
|
157
|
+
parent_node = node.parent
|
|
158
|
+
for polytope in node["unsliced_polytopes"]:
|
|
159
|
+
node._parent = parent_node
|
|
160
|
+
if ax.name in polytope._axes:
|
|
161
|
+
# keep track of the first polytope defined on the given axis
|
|
162
|
+
if not first_polytope:
|
|
163
|
+
first_polytope = polytope
|
|
164
|
+
lower, upper, slice_axis_idx = polytope.extents(ax.name)
|
|
165
|
+
if not first_slice_axis_idx:
|
|
166
|
+
first_slice_axis_idx = slice_axis_idx
|
|
167
|
+
if self.ax_is_unsliceable[ax.name]:
|
|
168
|
+
all_lowers.append(lower)
|
|
169
|
+
else:
|
|
170
|
+
values = self.find_values_between(polytope, ax, node, datacube, lower, upper)
|
|
171
|
+
all_values.extend(values)
|
|
172
|
+
if self.ax_is_unsliceable[ax.name]:
|
|
173
|
+
self._build_unsliceable_child(
|
|
174
|
+
first_polytope, ax, node, datacube, all_lowers, next_nodes, first_slice_axis_idx
|
|
175
|
+
)
|
|
176
|
+
else:
|
|
177
|
+
if len(all_values) == 0:
|
|
178
|
+
node.remove_branch()
|
|
179
|
+
self._build_sliceable_child(
|
|
180
|
+
first_polytope, ax, node, datacube, all_values, next_nodes, first_slice_axis_idx
|
|
181
|
+
)
|
|
182
|
+
|
|
183
|
+
del node["unsliced_polytopes"]
|
|
184
|
+
|
|
185
|
+
def find_compressed_axes(self, datacube, polytopes):
|
|
186
|
+
# First determine compressable axes from input polytopes
|
|
187
|
+
compressable_axes = []
|
|
188
|
+
for polytope in polytopes:
|
|
189
|
+
if polytope.is_orthogonal:
|
|
190
|
+
for ax in polytope.axes():
|
|
191
|
+
compressable_axes.append(ax)
|
|
192
|
+
# Cross check this list with list of compressable axis from datacube
|
|
193
|
+
# (should not include any merged or coupled axes)
|
|
194
|
+
for compressed_axis in compressable_axes:
|
|
195
|
+
if compressed_axis in datacube.compressed_axes:
|
|
196
|
+
self.compressed_axes.append(compressed_axis)
|
|
197
|
+
# add the last axis of the grid always (longitude) as a compressed axis
|
|
198
|
+
k, last_value = _, datacube.axes[k] = datacube.axes.popitem()
|
|
199
|
+
self.compressed_axes.append(k)
|
|
200
|
+
|
|
201
|
+
def remove_compressed_axis_in_union(self, polytopes):
|
|
202
|
+
for p in polytopes:
|
|
203
|
+
if p.is_in_union:
|
|
204
|
+
for axis in p.axes():
|
|
205
|
+
if axis == self.compressed_axes[-1]:
|
|
206
|
+
self.compressed_axes.remove(axis)
|
|
207
|
+
|
|
208
|
+
    def extract(self, datacube: Datacube, polytopes: List[ConvexPolytope]):
        """Build and return the TensorIndexTree of datacube indices selected by
        ``polytopes``.

        Steps: determine compressed axes, normalise polytope points to floats,
        group polytopes by axis set, then for each combination of groups slice
        axis by axis, breadth-first, and merge the per-combination trees.
        """
        # Determine list of axes to compress
        self.find_compressed_axes(datacube, polytopes)

        # remove compressed axes which are in a union
        self.remove_compressed_axis_in_union(polytopes)

        # Convert the polytope points to float type to support triangulation and interpolation
        for p in polytopes:
            self._unique_continuous_points(p, datacube)

        groups, input_axes = group(polytopes)
        datacube.validate(input_axes)
        request = TensorIndexTree()
        combinations = tensor_product(groups)

        # NOTE: could optimise here if we know combinations will always be for one request.
        # Then we do not need to create a new index tree and merge it to request, but can just
        # directly work on request and return it...

        for c in combinations:
            r = TensorIndexTree()
            # flatten one level: a combination entry may be a list of polytopes
            new_c = []
            for combi in c:
                if isinstance(combi, list):
                    new_c.extend(combi)
                else:
                    new_c.append(combi)
            r["unsliced_polytopes"] = set(new_c)
            current_nodes = [r]
            # breadth-first: slice every node of the current level along each
            # datacube axis in order
            for ax in datacube.axes.values():
                next_nodes = []
                interm_next_nodes = []
                for node in current_nodes:
                    self._build_branch(ax, node, datacube, interm_next_nodes)
                    next_nodes.extend(interm_next_nodes)
                    interm_next_nodes = []
                current_nodes = next_nodes

            request.merge(r)
        return request
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def _find_intersects(polytope, slice_axis_idx, value):
    """Return the points where the polytope's edges cross the hyperplane
    ``axis[slice_axis_idx] == value`` (may include excess interior points)."""
    # Split vertices into those on/above and on/below the slicing plane.
    above = [p for p in polytope.points if p[slice_axis_idx] >= value]
    below = [p for p in polytope.points if p[slice_axis_idx] <= value]

    intersects = []
    # Intersect every above/below pair; interior points created here are
    # discarded later by the convex hull.
    for a in above:
        for b in below:
            if a[slice_axis_idx] == b[slice_axis_idx]:
                # edge is incident with the slice plane -- keep the point as-is
                intersects.append(b)
                continue
            # interpolation parameter of the crossing along the edge (a, b)
            t = (value - b[slice_axis_idx]) / (a[slice_axis_idx] - b[slice_axis_idx])
            intersects.append(lerp(a, b, t))
    return intersects
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def _reduce_dimension(intersects, slice_axis_idx):
|
|
273
|
+
temp_intersects = []
|
|
274
|
+
for point in intersects:
|
|
275
|
+
point = [p for i, p in enumerate(point) if i != slice_axis_idx]
|
|
276
|
+
temp_intersects.append(point)
|
|
277
|
+
return temp_intersects
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def slice(polytope: ConvexPolytope, axis, value, slice_axis_idx):
    """Slice ``polytope`` along ``axis`` at ``value`` and return the
    dimension-reduced intersection polytope, or None when the plane misses it.

    NOTE: intentionally shadows the builtin ``slice``; renaming would break callers.

    Bug fix: the QhullError fallback tested ``if "less than" or "flat" in str(e)``,
    which is always True because "less than" is a truthy literal. Degenerate-geometry
    errors are now matched correctly and any other QhullError is re-raised
    (previously all QhullErrors silently took the degenerate fallback).
    """
    if polytope.is_flat:
        # 1D polytope: the slice is the value itself if it is one of the points.
        if value in chain(*polytope.points):
            intersects = [[value]]
        else:
            return None
    else:
        intersects = _find_intersects(polytope, slice_axis_idx, value)

    if len(intersects) == 0:
        return None

    # Reduce dimension of intersection points, removing slice axis
    intersects = _reduce_dimension(intersects, slice_axis_idx)

    axes = copy(polytope._axes)
    axes.remove(axis)

    # Fewer points than a simplex needs in this dimension: nothing to hull.
    if len(intersects) < len(intersects[0]) + 1:
        return ConvexPolytope(axes, intersects)
    # Compute convex hull (removing interior points)
    if len(intersects[0]) == 0:
        return None
    elif len(intersects[0]) == 1:  # qhull doesn't like 1D, do it ourselves
        amin = argmin(intersects)
        amax = argmax(intersects)
        vertices = [amin, amax]
    else:
        try:
            hull = scipy.spatial.ConvexHull(intersects)
            vertices = hull.vertices
        except scipy.spatial.qhull.QhullError as e:
            # Degenerate input (flat/coplanar or too few distinct points):
            # fall back to the raw intersection points.
            if "less than" in str(e) or "flat" in str(e):
                return ConvexPolytope(axes, intersects)
            raise
    # Sliced result is simply the convex hull
    return ConvexPolytope(axes, [intersects[i] for i in vertices])
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from abc import ABC
|
|
2
|
+
from typing import Dict, List, Literal, Optional, Union
|
|
3
|
+
|
|
4
|
+
from conflator import ConfigModel
|
|
5
|
+
from pydantic import ConfigDict
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class TransformationConfig(ConfigModel):
    """Base model for one axis-transformation configuration entry."""

    # Reject unknown keys so configuration typos fail loudly.
    model_config = ConfigDict(extra="forbid")
    # Discriminating transformation name; narrowed to a Literal in subclasses.
    name: str = ""
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class CyclicConfig(TransformationConfig):
    """Marks an axis as cyclic over a given range."""

    name: Literal["cyclic"]
    # Cyclic bounds; default is a placeholder -- real configs supply [min, max].
    range: List[float] = [0]
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class MapperConfig(TransformationConfig):
    """Configuration for mapping an axis onto a 2D grid."""

    name: Literal["mapper"]
    # Grid type -- presumably one of the mapper_types (octahedral, healpix,
    # regular, ...); confirm against datacube_mappers.
    type: str = ""
    # Grid resolution; a list when the grid needs one value per direction.
    resolution: Union[int, List[int]] = 0
    # Names of the grid axes produced by the mapping.
    axes: List[str] = [""]
    # Optional hash identifying a precomputed grid -- TODO confirm semantics.
    md5_hash: Optional[str] = None
    # Optional local-area bounds for regional grids -- TODO confirm format.
    local: Optional[List[float]] = None
    # Per-axis flag recording whether the axis values are stored reversed.
    axis_reversed: Optional[Dict[str, bool]] = None
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ReverseConfig(TransformationConfig):
    """Marks an axis whose values are stored in reverse order."""

    name: Literal["reverse"]
    is_reverse: bool = False
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TypeChangeConfig(TransformationConfig):
    """Casts an axis's values to another type (default: int)."""

    name: Literal["type_change"]
    type: str = "int"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class MergeConfig(TransformationConfig):
    """Merges this axis with another axis into a single one."""

    name: Literal["merge"]
    # Name of the axis merged into this one.
    other_axis: str = ""
    # String(s) joining the two axes' values -- TODO confirm exact usage.
    linkers: List[str] = [""]
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
# Union of all supported per-axis transformation configs, discriminated by `name`.
action_subclasses_union = Union[CyclicConfig, MapperConfig, ReverseConfig, TypeChangeConfig, MergeConfig]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class AxisConfig(ConfigModel):
    """Configuration of one datacube axis: its name and its transformations."""

    axis_name: str = ""
    # NOTE(review): lowercase `list[...]` needs Python >= 3.9, unlike the
    # typing.List used elsewhere in this file -- harmless but inconsistent.
    transformations: list[action_subclasses_union]
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
# Allowed scalar value types inside the pre_path mapping.
path_subclasses_union = Union[str, int, float]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class GribJumpAxesConfig(ConfigModel):
    """Alternative axis definition (name plus fixed values) -- presumably for
    the GribJump/FDB backend; confirm against backends/fdb.py."""

    axis_name: str = ""
    values: List[str] = [""]
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class Config(ConfigModel):
    """Top-level polytope options model."""

    # Per-axis transformation configuration.
    axis_config: List[AxisConfig] = []
    # Names of axes that may be compressed in the result tree.
    compressed_axes_config: List[str] = [""]
    # Fixed key/value metadata -- presumably prepended to datacube request
    # paths; verify against the backend that consumes it.
    pre_path: Optional[Dict[str, path_subclasses_union]] = {}
    # Alternative axis definitions (see GribJumpAxesConfig).
    alternative_axes: List[GribJumpAxesConfig] = []
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class PolytopeOptions(ABC):
    """Namespace for parsing a raw options mapping into its config sections."""

    @staticmethod
    def get_polytope_options(options):
        """Validate ``options`` against Config and return its four sections as
        an (axis_config, compressed_axes_config, pre_path, alternative_axes) tuple."""
        parsed = Config.model_validate(options)
        return (
            parsed.axis_config,
            parsed.compressed_axes_config,
            parsed.pre_path,
            parsed.alternative_axes,
        )
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
from .options import PolytopeOptions
|
|
5
|
+
from .shapes import ConvexPolytope
|
|
6
|
+
from .utility.exceptions import AxisOverdefinedError
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Request:
    """Encapsulates a request for data as a collection of shapes."""

    def __init__(self, *shapes):
        self.shapes = list(shapes)
        self.check_axes()

    def check_axes(self):
        """Check that all axes are defined by the combination of shapes, and that they are defined only once"""
        seen_axes = []
        for shape in self.shapes:
            for axis in shape.axes():
                if axis in seen_axes:
                    raise AxisOverdefinedError(axis)
                seen_axes.append(axis)

    def polytopes(self):
        """Returns the representation of the request as polytopes"""
        polys = []
        for shape in self.shapes:
            polys.extend(shape.polytope())
        return polys

    def __repr__(self):
        return "".join(shape.__repr__() + "\n" for shape in self.shapes)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class Polytope:
    """High-level entry point coupling a datacube with a slicing engine."""

    def __init__(self, datacube, engine=None, options=None, context=None):
        # Imported lazily -- presumably to avoid circular imports at module
        # load time; confirm against package layout.
        from .datacube import Datacube
        from .engine import Engine

        if options is None:
            options = {}

        # NOTE(review): the third element returned by get_polytope_options is
        # `pre_path`, bound here to the name `config` and passed as the second
        # argument to Datacube.create -- confirm against Datacube.create's
        # expected argument order.
        axis_options, compressed_axes_options, config, alternative_axes = PolytopeOptions.get_polytope_options(options)

        self.context = context

        self.datacube = Datacube.create(
            datacube, config, axis_options, compressed_axes_options, alternative_axes, self.context
        )
        self.engine = engine if engine is not None else Engine.default()
        # NOTE(review): never read elsewhere in this class -- possibly dead.
        self.time = 0

    def slice(self, polytopes: List[ConvexPolytope]):
        """Low-level API which takes a polytope geometry object and uses it to slice the datacube"""
        return self.engine.extract(self.datacube, polytopes)

    def retrieve(self, request: Request, method="standard"):
        """Higher-level API which takes a request and uses it to slice the datacube.

        NOTE(review): the `method` parameter is unused in this body.
        """
        logging.info("Starting request for %s ", self.context)
        self.datacube.check_branching_axes(request)
        request_tree = self.engine.extract(self.datacube, request.polytopes())
        logging.info("Created request tree for %s ", self.context)
        self.datacube.get(request_tree, self.context)
        logging.info("Retrieved data for %s ", self.context)
        return request_tree
|