polytope-python 1.0.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. polytope_feature/__init__.py +1 -0
  2. polytope_feature/datacube/__init__.py +1 -0
  3. polytope_feature/datacube/backends/__init__.py +1 -0
  4. polytope_feature/datacube/backends/datacube.py +171 -0
  5. polytope_feature/datacube/backends/fdb.py +399 -0
  6. polytope_feature/datacube/backends/mock.py +71 -0
  7. polytope_feature/datacube/backends/xarray.py +142 -0
  8. polytope_feature/datacube/datacube_axis.py +332 -0
  9. polytope_feature/datacube/index_tree_pb2.py +27 -0
  10. polytope_feature/datacube/tensor_index_tree.py +228 -0
  11. polytope_feature/datacube/transformations/__init__.py +1 -0
  12. polytope_feature/datacube/transformations/datacube_cyclic/__init__.py +1 -0
  13. polytope_feature/datacube/transformations/datacube_cyclic/datacube_cyclic.py +171 -0
  14. polytope_feature/datacube/transformations/datacube_mappers/__init__.py +1 -0
  15. polytope_feature/datacube/transformations/datacube_mappers/datacube_mappers.py +141 -0
  16. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/__init__.py +5 -0
  17. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix.py +147 -0
  18. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/healpix_nested.py +229 -0
  19. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/local_regular.py +95 -0
  20. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/octahedral.py +7896 -0
  21. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_gaussian.py +1459 -0
  22. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/reduced_ll.py +5128 -0
  23. polytope_feature/datacube/transformations/datacube_mappers/mapper_types/regular.py +75 -0
  24. polytope_feature/datacube/transformations/datacube_merger/__init__.py +1 -0
  25. polytope_feature/datacube/transformations/datacube_merger/datacube_merger.py +95 -0
  26. polytope_feature/datacube/transformations/datacube_reverse/__init__.py +1 -0
  27. polytope_feature/datacube/transformations/datacube_reverse/datacube_reverse.py +65 -0
  28. polytope_feature/datacube/transformations/datacube_transformations.py +96 -0
  29. polytope_feature/datacube/transformations/datacube_type_change/__init__.py +1 -0
  30. polytope_feature/datacube/transformations/datacube_type_change/datacube_type_change.py +124 -0
  31. polytope_feature/datacube/tree_encoding.py +132 -0
  32. polytope_feature/engine/__init__.py +1 -0
  33. polytope_feature/engine/engine.py +19 -0
  34. polytope_feature/engine/hullslicer.py +316 -0
  35. polytope_feature/options.py +77 -0
  36. polytope_feature/polytope.py +71 -0
  37. polytope_feature/shapes.py +405 -0
  38. polytope_feature/utility/__init__.py +0 -0
  39. polytope_feature/utility/combinatorics.py +48 -0
  40. polytope_feature/utility/exceptions.py +45 -0
  41. polytope_feature/utility/geometry.py +26 -0
  42. polytope_feature/utility/list_tools.py +41 -0
  43. polytope_feature/utility/profiling.py +14 -0
  44. polytope_feature/version.py +1 -0
  45. polytope_python-1.0.31.dist-info/LICENSE +201 -0
  46. polytope_python-1.0.31.dist-info/METADATA +21 -0
  47. polytope_python-1.0.31.dist-info/RECORD +49 -0
  48. polytope_python-1.0.31.dist-info/WHEEL +5 -0
  49. polytope_python-1.0.31.dist-info/top_level.txt +1 -0
@@ -0,0 +1,142 @@
1
+ from copy import deepcopy
2
+
3
+ import numpy as np
4
+ import xarray as xr
5
+
6
+ from .datacube import Datacube
7
+
8
+
9
class XArrayDatacube(Datacube):
    """Xarray arrays are labelled, axes can be defined as strings or integers (e.g. "time" or 0)."""

    def __init__(self, dataarray: xr.DataArray, axis_options=None, compressed_axes_options=None, context=None):
        """Build a datacube over an xarray DataArray.

        BUGFIX: ``compressed_axes_options`` previously defaulted to a shared
        mutable list (``[]``); every instance created without the argument
        aliased the same object, so a mutation through one instance leaked
        into all others.  Defaulting to ``None`` and materialising a fresh
        list per call is backward compatible.
        """
        if compressed_axes_options is None:
            compressed_axes_options = []
        super().__init__(axis_options, compressed_axes_options)
        if axis_options is None:
            axis_options = {}
        self.axis_options = axis_options
        self.axis_counter = 0
        self._axes = None
        self.dataarray = dataarray

        # Register every coordinate variable of the dataarray as an axis.
        for name, values in dataarray.coords.variables.items():
            options = None
            for opt in self.axis_options:
                if opt.axis_name == name:
                    options = opt
            if name in dataarray.dims:
                self._check_and_add_axes(options, name, values)
                self.treated_axes.append(name)
                self.complete_axes.append(name)
            else:
                # Scalar (dimensionless) coordinates still become axes.
                if self.dataarray[name].dims == ():
                    self._check_and_add_axes(options, name, values)
                    self.treated_axes.append(name)
        # Dimensions that carry no coordinate variable also need an axis;
        # use their first value to determine the axis type.
        for name in dataarray.dims:
            if name not in self.treated_axes:
                options = None
                for opt in self.axis_options:
                    if opt.axis_name == name:
                        options = opt
                val = dataarray[name].values[0]
                self._check_and_add_axes(options, name, val)
                self.treated_axes.append(name)
        # add other options to axis which were just created above like "lat" for the mapper transformations for eg
        for name in self._axes:
            if name not in self.treated_axes:
                options = None
                for opt in self.axis_options:
                    if opt.axis_name == name:
                        options = opt
                val = self._axes[name].type
                self._check_and_add_axes(options, name, val)

    def get(self, requests, context=None, leaf_path=None, axis_counter=0):
        """Walk the request tree and attach ``(name, values)`` results to leaves.

        Branches whose depth does not match the datacube's axis count are
        pruned via ``requests.remove_branch()``.  NOTE(review): ``leaf_path``
        is shared (and mutated) across sibling subtrees, relying on each axis
        key being overwritten on the way down — confirm before refactoring.
        """
        if context is None:
            context = {}
        if leaf_path is None:
            leaf_path = {}
        if requests.axis.name == "root":
            for c in requests.children:
                self.get(c, context, leaf_path, axis_counter + 1)
        else:
            key_value_path = {requests.axis.name: requests.values}
            ax = requests.axis
            (key_value_path, leaf_path, self.unwanted_path) = ax.unmap_path_key(
                key_value_path, leaf_path, self.unwanted_path
            )
            leaf_path.update(key_value_path)
            if len(requests.children) != 0:
                # We are not a leaf and we loop over
                for c in requests.children:
                    self.get(c, context, leaf_path, axis_counter + 1)
            else:
                if self.axis_counter != axis_counter:
                    requests.remove_branch()
                else:
                    # We are at a leaf and need to assign value to it
                    leaf_path_copy = deepcopy(leaf_path)
                    unmapped_path = {}
                    self.refit_path(leaf_path_copy, unmapped_path, leaf_path)
                    for key in leaf_path_copy:
                        leaf_path_copy[key] = list(leaf_path_copy[key])
                    for key in unmapped_path:
                        if isinstance(unmapped_path[key], tuple):
                            unmapped_path[key] = list(unmapped_path[key])
                    subxarray = self.dataarray.sel(leaf_path_copy, method="nearest")
                    subxarray = subxarray.sel(unmapped_path)
                    value = subxarray.values
                    key = subxarray.name
                    requests.result = (key, value)

    def datacube_natural_indexes(self, axis, subarray):
        """Return the native index values of ``subarray`` along ``axis``.

        Complete (dimension) axes use the pandas index of the sub-array;
        otherwise the raw coordinate values are returned, wrapped in a list
        when the coordinate is 0-dimensional.
        """
        if axis.name in self.complete_axes:
            indexes = next(iter(subarray.xindexes.values())).to_pandas_index()
        else:
            if subarray[axis.name].values.ndim == 0:
                # NOTE how we handle the two special datetime and timedelta cases to conform with numpy arrays
                if np.issubdtype(subarray[axis.name].values.dtype, np.datetime64):
                    indexes = [subarray[axis.name].astype("datetime64[us]").values]
                elif np.issubdtype(subarray[axis.name].values.dtype, np.timedelta64):
                    indexes = [subarray[axis.name].astype("timedelta64[us]").values]
                else:
                    indexes = [subarray[axis.name].values.tolist()]
            else:
                indexes = subarray[axis.name].values
        return indexes

    def refit_path(self, path_copy, unmapped_path, path):
        """Split ``path`` into dimension selections and exact-match selections.

        Keys that are not dataarray dimensions are dropped from ``path_copy``;
        keys unknown to the coordinates, or with string dtype, are moved to
        ``unmapped_path`` (selected without ``method="nearest"``).
        """
        for key in path.keys():
            if key not in self.dataarray.dims:
                path_copy.pop(key)
                if key not in self.dataarray.coords.dtypes:
                    unmapped_path.update({key: path[key]})
        for key in self.dataarray.coords.dtypes:
            key_dtype = self.dataarray.coords.dtypes[key]
            if key_dtype.type is np.str_ and key in path.keys():
                unmapped_path.update({key: path[key]})
                path_copy.pop(key, None)

    def select(self, path, unmapped_path):
        """Select the sub-array addressed by ``path`` and ``unmapped_path``.

        Compressed values are collapsed to their first element before
        selection; numeric keys use nearest-neighbour matching.
        """
        for key in path:
            key_value = path[key][0]
            path[key] = key_value
        for key in unmapped_path:
            key_value = unmapped_path[key][0]
            unmapped_path[key] = key_value
        path_copy = deepcopy(path)
        self.refit_path(path_copy, unmapped_path, path)
        subarray = self.dataarray.sel(path_copy, method="nearest")
        subarray = subarray.sel(unmapped_path)
        return subarray

    def ax_vals(self, name):
        """Return the coordinate values for axis ``name``.

        Falls back to the first value of a bare dimension when ``name`` has
        no coordinate variable.
        """
        treated_axes = []
        for _name, values in self.dataarray.coords.variables.items():
            treated_axes.append(_name)
            if _name == name:
                return values.values
        for _name in self.dataarray.dims:
            if _name not in treated_axes:
                if _name == name:
                    return self.dataarray[name].values[0]
@@ -0,0 +1,332 @@
1
+ import bisect
2
+ from abc import ABC, abstractmethod
3
+ from copy import deepcopy
4
+ from typing import Any, List
5
+
6
+ import numpy as np
7
+ import pandas as pd
8
+ import xarray as xr
9
+
10
+ from .transformations.datacube_cyclic.datacube_cyclic import DatacubeAxisCyclic
11
+ from .transformations.datacube_mappers.datacube_mappers import DatacubeMapper
12
+ from .transformations.datacube_merger.datacube_merger import DatacubeAxisMerger
13
+ from .transformations.datacube_reverse.datacube_reverse import DatacubeAxisReverse
14
+ from .transformations.datacube_type_change.datacube_type_change import (
15
+ DatacubeAxisTypeChange,
16
+ )
17
+
18
+
19
class DatacubeAxis(ABC):
    """Abstract base class for datacube axes.

    Concrete axes define how user-facing values convert to a continuous type
    (``parse``), to floats for slicing (``to_float``), and back
    (``from_float``).  A chain of transformations (merger, reverse, cyclic,
    mapper, type-change) may be attached; lookup/unmap operations consult the
    chain in reverse order.
    """

    is_cyclic = False
    has_mapper = False
    has_merger = False
    reorder = False
    type_change = False

    def order_tranformations(self):
        # (sic: method name typo preserved for backward compatibility)
        # Sort attached transformations into the canonical application order
        # defined by the module-level transformations_order mapping.
        self.transformations = sorted(self.transformations, key=lambda x: transformations_order[type(x)])

    def give_transformations_parents(self):
        # Link each transformation to the one that precedes it in the ordered
        # list.  BUGFIX: the previous code used self.transformations[i - 1];
        # enumerate() over transformations[1:] yields i = 0 for the element at
        # position 1, so i - 1 == -1 pointed the first linked transformation
        # at the *last* element of the list.  The correct parent is
        # self.transformations[i].
        for i, transform in enumerate(self.transformations[1:]):
            transform.parent = self.transformations[i]

    # Convert from user-provided value to CONTINUOUS type (e.g. float, pd.timestamp)
    @abstractmethod
    def parse(self, value: Any) -> Any:
        pass

    # Convert from CONTINUOUS type to FLOAT
    @abstractmethod
    def to_float(self, value: Any) -> float:
        pass

    # Convert from FLOAT type to CONTINUOUS type
    @abstractmethod
    def from_float(self, value: float) -> Any:
        pass

    def serialize(self, value: Any) -> Any:
        # Default: no serialization defined; concrete axes override this.
        pass

    def to_intervals(self, range):
        """Expand a [low, up] range into intervals via the transformation chain."""
        intervals = [range]
        for transformation in self.transformations[::-1]:
            intervals = transformation.to_intervals(range, intervals, self)
        return intervals

    def remap(self, range: List) -> Any:
        """Remap a range through the transformation chain (e.g. cyclic wrap)."""
        ranges = [range]
        for transformation in self.transformations[::-1]:
            ranges = transformation.remap(range, ranges, self)
        return ranges

    def unmap_to_datacube(self, path, unmapped_path):
        # Base axes need no unmapping; transformed axes override behaviour
        # through the transformation chain elsewhere.
        return (path, unmapped_path)

    def find_standard_indexes(self, path, datacube):
        """Unmap ``path`` through every axis it mentions, then read the
        natural indexes of the selected sub-array."""
        unmapped_path = {}
        path_copy = deepcopy(path)
        for key in path_copy:
            axis = datacube._axes[key]
            (path, unmapped_path) = axis.unmap_to_datacube(path, unmapped_path)
        subarray = datacube.select(path, unmapped_path)
        return datacube.datacube_natural_indexes(self, subarray)

    def find_indexes(self, path, datacube):
        """Return this axis's indexes at ``path``, post-processed by the
        transformation chain."""
        indexes = self.find_standard_indexes(path, datacube)
        for transformation in self.transformations[::-1]:
            indexes = transformation.find_modified_indexes(indexes, path, datacube, self)
        return indexes

    def offset(self, value):
        """Accumulate the offset the transformation chain applies at ``value``."""
        offset = 0
        for transformation in self.transformations[::-1]:
            offset = transformation.offset(value, self, offset)
        return offset

    def unmap_path_key(self, key_value_path, leaf_path, unwanted_path):
        """Run a key/value path through each transformation's unmap step."""
        for transformation in self.transformations[::-1]:
            (key_value_path, leaf_path, unwanted_path) = transformation.unmap_path_key(
                key_value_path, leaf_path, unwanted_path, self
            )
        return (key_value_path, leaf_path, unwanted_path)

    def unmap_tree_node(self, node, unwanted_path):
        """Run a tree node through each transformation's node-unmap step."""
        for transformation in self.transformations[::-1]:
            (node, unwanted_path) = transformation.unmap_tree_node(node, unwanted_path)
        return (node, unwanted_path)

    def _remap_val_to_axis_range(self, value):
        for transformation in self.transformations[::-1]:
            value = transformation._remap_val_to_axis_range(value, self)
        return value

    def find_standard_indices_between(self, indexes, low, up, datacube, method=None):
        """Return the indexes lying in [low, up].

        With method "surrounding"/"nearest", one extra index is included on
        each side of the range when available.
        """
        indexes_between_ranges = []

        if self.name in datacube.complete_axes and self.name not in datacube.transformed_axes:
            # Find the range of indexes between lower and upper
            # https://pandas.pydata.org/docs/reference/api/pandas.Index.searchsorted.html
            # Assumes the indexes are already sorted (could sort to be sure) and monotonically increasing
            if method == "surrounding" or method == "nearest":
                start = indexes.searchsorted(low, "left")
                end = indexes.searchsorted(up, "right")
                start = max(start - 1, 0)
                end = min(end + 1, len(indexes))
                indexes_between = indexes[start:end].to_list()
                indexes_between_ranges.extend(indexes_between)
            else:
                start = indexes.searchsorted(low, "left")
                end = indexes.searchsorted(up, "right")
                indexes_between = indexes[start:end].to_list()
                indexes_between_ranges.extend(indexes_between)
        else:
            # Plain sequences: bisect gives the same semantics as searchsorted.
            if method == "surrounding" or method == "nearest":
                start = bisect.bisect_left(indexes, low)
                end = bisect.bisect_right(indexes, up)
                start = max(start - 1, 0)
                end = min(end + 1, len(indexes))
                indexes_between = indexes[start:end]
                indexes_between_ranges.extend(indexes_between)
            else:
                lower_idx = bisect.bisect_left(indexes, low)
                upper_idx = bisect.bisect_right(indexes, up)
                indexes_between = indexes[lower_idx:upper_idx]
                indexes_between_ranges.extend(indexes_between)
        return indexes_between_ranges

    def find_indices_between(self, indexes_ranges, low, up, datacube, method=None):
        """Like find_standard_indices_between, post-processed by the
        transformation chain."""
        indexes_between_ranges = self.find_standard_indices_between(indexes_ranges, low, up, datacube, method)
        for transformation in self.transformations[::-1]:
            indexes_between_ranges = transformation.find_indices_between(
                indexes_ranges, low, up, datacube, method, indexes_between_ranges, self
            )
        return indexes_between_ranges

    @staticmethod
    def values_type(values):
        """Determine the element type of an axis's values."""
        type_ = None
        if isinstance(values, xr.core.variable.IndexVariable) or isinstance(values, xr.core.variable.Variable):
            # If we have some xarray variable, transform them to actual variable type
            values = np.array(values)
            type_ = values.dtype.type
        else:
            if len(values) == 0:
                # If we have no values (newly created axis), default to a float
                values = np.array(values)
                type_ = values.dtype.type
            else:
                type_ = type(values[0])
        return type_

    @staticmethod
    def create_standard(name, values, datacube):
        """Create and register a standard axis on ``datacube`` for ``values``."""
        val_type = DatacubeAxis.values_type(values)

        DatacubeAxis.check_axis_type(name, val_type)
        # Deep-copy the shared prototype so per-axis state stays independent.
        if datacube._axes is None:
            datacube._axes = {name: deepcopy(_type_to_axis_lookup[val_type])}
        else:
            datacube._axes[name] = deepcopy(_type_to_axis_lookup[val_type])

        datacube._axes[name].name = name
        datacube.axis_counter += 1

    @staticmethod
    def check_axis_type(name, val_type):
        """Raise ValueError when no axis prototype exists for ``val_type``."""
        if val_type not in _type_to_axis_lookup:
            raise ValueError(f"Could not create a mapper for index type {val_type} for axis {name}")
179
+
180
+
# Canonical ordering of axis transformations: a lower priority number means
# the transformation is applied earlier when sorting an axis's chain.
transformations_order = {
    transformation_class: priority
    for priority, transformation_class in enumerate(
        (
            DatacubeAxisMerger,
            DatacubeAxisReverse,
            DatacubeAxisCyclic,
            DatacubeMapper,
            DatacubeAxisTypeChange,
        )
    )
}
189
+
190
+
191
class IntDatacubeAxis(DatacubeAxis):
    """Axis over integer-valued indexes.

    Values are normalised to plain floats on the continuous axis and
    serialized unchanged.
    """

    def __init__(self):
        self.name = None
        self.range = None
        self.tol = 1e-12
        self.can_round = True
        # TODO: Maybe here, store transformations as a dico instead
        self.transformations = []
        self.type = 0

    def parse(self, value: Any) -> Any:
        # Incoming user values become floats for slicing.
        return float(value)

    def to_float(self, value):
        return float(value)

    def from_float(self, value):
        return float(value)

    def serialize(self, value):
        # Integer axis values are already serializable as-is.
        return value
212
+
213
+
214
class FloatDatacubeAxis(DatacubeAxis):
    """Axis over float-valued indexes.

    All conversions are identity-like float casts; values serialize
    unchanged.
    """

    def __init__(self):
        self.name = None
        self.range = None
        self.tol = 1e-12
        self.can_round = True
        self.transformations = []
        self.type = 0.0

    def parse(self, value: Any) -> Any:
        # Incoming user values become floats for slicing.
        return float(value)

    def to_float(self, value):
        return float(value)

    def from_float(self, value):
        return float(value)

    def serialize(self, value):
        # Float axis values are already serializable as-is.
        return value
234
+
235
+
236
class PandasTimestampDatacubeAxis(DatacubeAxis):
    """Axis over datetime values, mapped to/from seconds since the Unix epoch."""

    def __init__(self):
        self.name = None
        self.tol = 1e-12
        self.range = None
        self.transformations = []
        self.type = pd.Timestamp("2000-01-01T00:00:00")
        self.can_round = False

    def parse(self, value: Any) -> Any:
        """Convert a user-provided value to a pd.Timestamp."""
        if isinstance(value, np.str_):
            value = str(value)
        return pd.Timestamp(value)

    def to_float(self, value: pd.Timestamp):
        """Return seconds since the epoch as a float.

        BUGFIX: the np.datetime64 branch previously cast the epoch delta with
        ``.astype("int")``, which yields a count in whatever time unit the
        input carried (e.g. nanoseconds for datetime64[ns], microseconds for
        datetime64[us]) — inconsistent with the pd.Timestamp branch and with
        ``from_float``, which both use seconds.  Dividing the delta by
        ``np.timedelta64(1, "s")`` normalises any unit to seconds.
        """
        if isinstance(value, np.datetime64):
            return float((value - np.datetime64("1970-01-01T00:00:00")) / np.timedelta64(1, "s"))
        else:
            # pd.Timestamp.value is nanoseconds since the epoch.
            return float(value.value / 10**9)

    def from_float(self, value):
        """Convert seconds since the epoch back to a pd.Timestamp."""
        return pd.Timestamp(int(value), unit="s")

    def serialize(self, value):
        return str(value)

    def offset(self, value):
        # Datetime axes carry no cyclic offset.
        return None
264
+
265
+
266
class PandasTimedeltaDatacubeAxis(DatacubeAxis):
    """Axis over timedelta values, mapped to/from a duration in seconds."""

    def __init__(self):
        self.name = None
        self.tol = 1e-12
        self.range = None
        self.transformations = []
        self.type = np.timedelta64(0, "s")
        self.can_round = False

    def parse(self, value: Any) -> Any:
        """Convert a user-provided value to a pd.Timedelta."""
        if isinstance(value, np.str_):
            value = str(value)
        return pd.Timedelta(value)

    def to_float(self, value: pd.Timedelta):
        """Return the duration in seconds as a float.

        BUGFIX: the np.timedelta64 branch previously returned a numpy
        integer (``astype(int)`` result), breaking the float contract that
        the pd.Timedelta branch honours; wrap the result in ``float``.
        """
        if isinstance(value, np.timedelta64):
            return float(value.astype("timedelta64[s]").astype(int))
        else:
            # pd.Timedelta.value is nanoseconds.
            return float(value.value / 10**9)

    def from_float(self, value):
        """Convert a duration in seconds back to a pd.Timedelta."""
        return pd.Timedelta(int(value), unit="s")

    def serialize(self, value):
        return str(value)

    def offset(self, value):
        # Timedelta axes carry no cyclic offset.
        return None
294
+
295
+
296
class UnsliceableDatacubeAxis(DatacubeAxis):
    """Axis for categorical (string-like) values that cannot be sliced.

    Float conversion and serialization raise: these values can only be
    matched exactly, never interpolated.
    """

    def __init__(self):
        self.name = None
        # Tolerance is meaningless for categorical values.
        self.tol = float("NaN")
        self.range = None
        self.transformations = []
        self.can_round = False

    def parse(self, value: Any) -> Any:
        # Categorical values pass through unchanged.
        return value

    def to_float(self, value: Any):
        # FIX: the annotation was a copy-paste of pd.Timedelta from the
        # timedelta axis; this axis accepts arbitrary categorical values.
        raise TypeError("Tried to slice unsliceable axis")

    def from_float(self, value):
        raise TypeError("Tried to slice unsliceable axis")

    def serialize(self, value):
        raise TypeError("Tried to slice unsliceable axis")
315
+
316
+
317
# Prototype axis instances keyed by the type of an axis's index values.
# NOTE: these instances are shared at module level; DatacubeAxis.create_standard
# deepcopies the looked-up prototype before attaching it to a datacube, so the
# shared objects are never mutated in place.
_type_to_axis_lookup = {
    pd.Int64Dtype: IntDatacubeAxis(),
    pd.Timestamp: PandasTimestampDatacubeAxis(),
    np.int64: IntDatacubeAxis(),
    np.datetime64: PandasTimestampDatacubeAxis(),
    np.timedelta64: PandasTimedeltaDatacubeAxis(),
    pd.Timedelta: PandasTimedeltaDatacubeAxis(),
    np.float64: FloatDatacubeAxis(),
    np.float32: FloatDatacubeAxis(),
    np.int32: IntDatacubeAxis(),
    np.str_: UnsliceableDatacubeAxis(),
    str: UnsliceableDatacubeAxis(),
    np.object_: UnsliceableDatacubeAxis(),
    int: IntDatacubeAxis(),
    float: FloatDatacubeAxis(),
}
@@ -0,0 +1,27 @@
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: index_tree.proto
# Protobuf Python Version: 5.26.1
# NOTE(review): machine-generated module — to change the Node message, edit
# index_tree.proto and regenerate with protoc rather than editing this file.
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder

# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
    b'\n\x10index_tree.proto\x12\nindex_tree"\x9a\x01\n\x04Node\x12\x0c\n\x04\x61xis\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x03(\t\x12\x0f\n\x07indexes\x18\x03 \x03(\x03\x12\x0e\n\x06result\x18\x04 \x03(\x01\x12\x13\n\x0bsize_result\x18\x05 \x03(\x03\x12"\n\x08\x63hildren\x18\x06 \x03(\x0b\x32\x10.index_tree.Node\x12\x1b\n\x13size_indexes_branch\x18\x07 \x03(\x03\x62\x06proto3'
)

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "index_tree_pb2", _globals)
if not _descriptor._USE_C_DESCRIPTORS:
    DESCRIPTOR._loaded_options = None
    _globals["_NODE"]._serialized_start = 33
    _globals["_NODE"]._serialized_end = 187
# @@protoc_insertion_point(module_scope)
+ # @@protoc_insertion_point(module_scope)