pydpm_xl-0.1.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. py_dpm/AST/ASTConstructor.py +503 -0
  2. py_dpm/AST/ASTObjects.py +827 -0
  3. py_dpm/AST/ASTTemplate.py +101 -0
  4. py_dpm/AST/ASTVisitor.py +13 -0
  5. py_dpm/AST/MLGeneration.py +588 -0
  6. py_dpm/AST/ModuleAnalyzer.py +79 -0
  7. py_dpm/AST/ModuleDependencies.py +203 -0
  8. py_dpm/AST/WhereClauseChecker.py +12 -0
  9. py_dpm/AST/__init__.py +0 -0
  10. py_dpm/AST/check_operands.py +302 -0
  11. py_dpm/DataTypes/ScalarTypes.py +324 -0
  12. py_dpm/DataTypes/TimeClasses.py +370 -0
  13. py_dpm/DataTypes/TypePromotion.py +195 -0
  14. py_dpm/DataTypes/__init__.py +0 -0
  15. py_dpm/Exceptions/__init__.py +0 -0
  16. py_dpm/Exceptions/exceptions.py +84 -0
  17. py_dpm/Exceptions/messages.py +114 -0
  18. py_dpm/OperationScopes/OperationScopeService.py +247 -0
  19. py_dpm/OperationScopes/__init__.py +0 -0
  20. py_dpm/Operators/AggregateOperators.py +138 -0
  21. py_dpm/Operators/BooleanOperators.py +30 -0
  22. py_dpm/Operators/ClauseOperators.py +159 -0
  23. py_dpm/Operators/ComparisonOperators.py +69 -0
  24. py_dpm/Operators/ConditionalOperators.py +362 -0
  25. py_dpm/Operators/NumericOperators.py +101 -0
  26. py_dpm/Operators/Operator.py +388 -0
  27. py_dpm/Operators/StringOperators.py +27 -0
  28. py_dpm/Operators/TimeOperators.py +53 -0
  29. py_dpm/Operators/__init__.py +0 -0
  30. py_dpm/Utils/ValidationsGenerationUtils.py +429 -0
  31. py_dpm/Utils/__init__.py +0 -0
  32. py_dpm/Utils/operands_mapping.py +73 -0
  33. py_dpm/Utils/operator_mapping.py +89 -0
  34. py_dpm/Utils/tokens.py +172 -0
  35. py_dpm/Utils/utils.py +2 -0
  36. py_dpm/ValidationsGeneration/PropertiesConstraintsProcessor.py +190 -0
  37. py_dpm/ValidationsGeneration/Utils.py +364 -0
  38. py_dpm/ValidationsGeneration/VariantsProcessor.py +265 -0
  39. py_dpm/ValidationsGeneration/__init__.py +0 -0
  40. py_dpm/ValidationsGeneration/auxiliary_functions.py +98 -0
  41. py_dpm/__init__.py +61 -0
  42. py_dpm/api/__init__.py +140 -0
  43. py_dpm/api/ast_generator.py +438 -0
  44. py_dpm/api/complete_ast.py +241 -0
  45. py_dpm/api/data_dictionary_validation.py +577 -0
  46. py_dpm/api/migration.py +77 -0
  47. py_dpm/api/semantic.py +224 -0
  48. py_dpm/api/syntax.py +182 -0
  49. py_dpm/client.py +106 -0
  50. py_dpm/data_handlers.py +99 -0
  51. py_dpm/db_utils.py +117 -0
  52. py_dpm/grammar/__init__.py +0 -0
  53. py_dpm/grammar/dist/__init__.py +0 -0
  54. py_dpm/grammar/dist/dpm_xlLexer.interp +428 -0
  55. py_dpm/grammar/dist/dpm_xlLexer.py +804 -0
  56. py_dpm/grammar/dist/dpm_xlLexer.tokens +106 -0
  57. py_dpm/grammar/dist/dpm_xlParser.interp +249 -0
  58. py_dpm/grammar/dist/dpm_xlParser.py +5224 -0
  59. py_dpm/grammar/dist/dpm_xlParser.tokens +106 -0
  60. py_dpm/grammar/dist/dpm_xlParserListener.py +742 -0
  61. py_dpm/grammar/dist/dpm_xlParserVisitor.py +419 -0
  62. py_dpm/grammar/dist/listeners.py +10 -0
  63. py_dpm/grammar/dpm_xlLexer.g4 +435 -0
  64. py_dpm/grammar/dpm_xlParser.g4 +260 -0
  65. py_dpm/migration.py +282 -0
  66. py_dpm/models.py +2139 -0
  67. py_dpm/semantics/DAG/DAGAnalyzer.py +158 -0
  68. py_dpm/semantics/DAG/__init__.py +0 -0
  69. py_dpm/semantics/SemanticAnalyzer.py +320 -0
  70. py_dpm/semantics/Symbols.py +223 -0
  71. py_dpm/semantics/__init__.py +0 -0
  72. py_dpm/utils/__init__.py +0 -0
  73. py_dpm/utils/ast_serialization.py +481 -0
  74. py_dpm/views/data_types.sql +12 -0
  75. py_dpm/views/datapoints.sql +65 -0
  76. py_dpm/views/hierarchy_operand_reference.sql +11 -0
  77. py_dpm/views/hierarchy_preconditions.sql +13 -0
  78. py_dpm/views/hierarchy_variables.sql +26 -0
  79. py_dpm/views/hierarchy_variables_context.sql +14 -0
  80. py_dpm/views/key_components.sql +18 -0
  81. py_dpm/views/module_from_table.sql +11 -0
  82. py_dpm/views/open_keys.sql +13 -0
  83. py_dpm/views/operation_info.sql +27 -0
  84. py_dpm/views/operation_list.sql +18 -0
  85. py_dpm/views/operations_versions_from_module_version.sql +30 -0
  86. py_dpm/views/precondition_info.sql +17 -0
  87. py_dpm/views/report_type_operand_reference_info.sql +18 -0
  88. py_dpm/views/subcategory_info.sql +17 -0
  89. py_dpm/views/table_info.sql +19 -0
  90. pydpm_xl-0.1.10.dist-info/LICENSE +674 -0
  91. pydpm_xl-0.1.10.dist-info/METADATA +50 -0
  92. pydpm_xl-0.1.10.dist-info/RECORD +94 -0
  93. pydpm_xl-0.1.10.dist-info/WHEEL +4 -0
  94. pydpm_xl-0.1.10.dist-info/entry_points.txt +3 -0
py_dpm/Operators/ConditionalOperators.py
@@ -0,0 +1,362 @@
+ import warnings
+ from typing import Union
+
+ import pandas as pd
+
+ from py_dpm.DataTypes.ScalarTypes import Mixed, ScalarFactory
+ from py_dpm.DataTypes.TypePromotion import binary_implicit_type_promotion, binary_implicit_type_promotion_with_mixed_types, \
+     unary_implicit_type_promotion
+ from py_dpm.Exceptions import exceptions
+ from py_dpm.Exceptions.exceptions import SemanticError
+ from py_dpm.Operators.Operator import Binary, Operator
+ from py_dpm.Utils import tokens
+ from py_dpm.semantics.Symbols import ConstantOperand, RecordSet, Scalar, Structure
+
+
+ class ConditionalOperator(Operator):
+     propagate_attributes = False
+
+     @classmethod
+     def validate(cls, *args):
+         pass
+
+     @classmethod
+     def create_labeled_scalar(cls, rslt_structure, rslt_type, origin):
+         """
+         Create the labelled scalar result, keeping the constant value when the operand is a ConstantOperand.
+         """
+         if not isinstance(rslt_structure, ConstantOperand):
+             scalar = cls._create_labeled_scalar(origin=origin, result_type=rslt_type)
+             return scalar
+         else:
+             value = rslt_structure.value
+             return ConstantOperand(type_=ScalarFactory().scalar_factory(str(rslt_type)), name=None,
+                                    origin=origin, value=value)
+
+     @classmethod
+     def _check_same_recordset_structures(cls, left: RecordSet, right: RecordSet, origin) -> bool:
+         """
+         Used for recordset-recordset
+         """
+         left = left.structure
+         right = right.structure
+         if len(left.get_key_components()) == len(right.get_key_components()):
+             # For better error management
+             class_check = Binary()
+             class_check.op = cls.op
+             class_check.check_same_components(left, right, origin)
+             return True
+         return False
+
+     @classmethod
+     def _check_structures(cls, left: RecordSet, right: RecordSet, origin: str, subset_allowed: bool = True) -> bool:
+         """
+         Used for recordset-recordset
+         """
+         left_records = left.records
+         right_records = right.records
+         if cls._check_same_recordset_structures(left, right, origin):
+             # validation for records
+             if left_records is not None and right_records is not None:
+                 result_dataframe = pd.merge(left_records, right_records, on=[col for col in left_records.columns if col != 'data_type'])
+                 if len(result_dataframe) != len(left_records):
+                     raise SemanticError("4-6-0-1")
+
+             return True
+
+         if subset_allowed:
+             if cls.op in (tokens.NVL, tokens.IF):
+                 operand_is_subset = cls.check_condition_is_subset(selection=right, condition=left)
+             else:
+                 operand_is_subset = cls.check_condition_is_subset(selection=left, condition=right)
+             if operand_is_subset:
+                 return True
+             raise exceptions.SemanticError("4-6-0-2", condition=left.name, operand=right.name)
+
+         raise SemanticError(
+             "5", op=cls.op, structure_1=left.get_key_components_names(),
+             structure_2=right.get_key_components_names()
+         )
+
+     @classmethod
+     def check_condition_is_subset(cls, selection: RecordSet, condition: RecordSet):
+         selection_dpm_components = selection.get_dpm_components()
+         condition_dpm_components = condition.get_dpm_components()
+
+         if set(condition.get_key_components_names()) <= set(selection.get_key_components_names()):
+             for comp_key, comp_value in condition_dpm_components.items():
+                 if comp_key not in selection_dpm_components:
+                     return False
+                 if comp_value.type.__class__ != selection_dpm_components[comp_key].type.__class__:
+                     return False
+             return True
+         return False
+
+     @staticmethod
+     def generate_result_dataframe(left: RecordSet, right: RecordSet):
+         if left.records is not None and right.records is not None:
+             result_dataframe = pd.merge(left.records, right.records,
+                                         on=[col for col in right.records.columns if col != 'data_type'],
+                                         suffixes=('_left', '_right'))
+
+             result_dataframe['data_type'] = result_dataframe['data_type_left']
+             result_dataframe = result_dataframe.drop(columns=['data_type_left', 'data_type_right'])
+
+             return result_dataframe
+
+         return None
+
+
+ class IfOperator(ConditionalOperator):
+     """
+     If-then-else conditional operator.
+     """
+     op = tokens.IF
+
+     @classmethod
+     def create_origin_expression(cls, condition, then_op, else_op=None) -> str:
+         condition_name = getattr(condition, 'name', None) or getattr(condition, 'origin')
+         then_name = getattr(then_op, 'name', None) or getattr(then_op, 'origin')
+         if else_op:
+             else_name = getattr(else_op, 'name', None) or getattr(else_op, 'origin')
+             origin = f"If {condition_name} then {then_name} else {else_name}"
+         else:
+             origin = f"If {condition_name} then {then_name}"
+         return origin
+
+     @classmethod
+     def check_condition(cls, condition: Union[RecordSet, Scalar]) -> bool:
+         """
+         Check if the condition has Boolean type
+         """
+         if isinstance(condition, RecordSet):
+             condition_type = condition.structure.components["f"].type
+         else:
+             condition_type = condition.type
+         # unary implicit promotion
+         error_info = {
+             'operand_name': condition.name,
+             'op': cls.op
+         }
+         boolean_type = ScalarFactory().scalar_factory("Boolean")
+         type_promotion = unary_implicit_type_promotion(
+             operand=condition_type, op_type_to_check=boolean_type, error_info=error_info
+         )
+         if type_promotion.strictly_same_class(boolean_type):
+             return True
+
+         raise SemanticError("4-6-1-1")
+
+     @classmethod
+     def check_structures(
+             cls, condition: Union[RecordSet, Scalar],
+             first: Union[RecordSet, Scalar], second: Union[RecordSet, Scalar], origin):
+         """
+         Check the structures of the condition, then and else operands and return the result structure and records.
+         """
+         if isinstance(condition, Scalar):
+             if second:
+                 if isinstance(first, RecordSet) and isinstance(second, RecordSet):
+                     if cls._check_structures(first, second, origin, subset_allowed=False):
+                         return first.structure, first.records
+                 elif isinstance(first, Scalar) and isinstance(second, Scalar):
+                     return first, None
+                 else:
+                     raise SemanticError("4-6-1-3")
+             else:
+                 if isinstance(first, RecordSet):
+                     raise SemanticError("4-6-1-2")
+                 return first, None
+         else:  # RecordSet
+             if second:
+                 if isinstance(first, RecordSet) and isinstance(second, RecordSet):
+                     cls._check_structures(condition, first, origin)
+                     cls._check_structures(condition, second, origin)
+                 if isinstance(first, RecordSet) and isinstance(second, Scalar):
+                     cls._check_structures(condition, first, origin)
+                 if isinstance(first, Scalar) and isinstance(second, RecordSet):
+                     cls._check_structures(condition, second, origin)
+                 return condition.structure, condition.records
+             else:
+                 if isinstance(first, RecordSet):
+                     cls._check_structures(condition, first, origin)
+                 return condition.structure, condition.records
+
+     @classmethod
+     def check_types(cls, first: Union[RecordSet, Scalar], result_dataframe: pd.DataFrame, second: Union[RecordSet, Scalar] = None):
+         if second:
+             if isinstance(first, RecordSet):
+                 first_type = first.structure.components["f"].type
+             else:
+                 first_type = first.type
+             if isinstance(second, RecordSet):
+                 second_type = second.structure.components["f"].type
+             else:
+                 second_type = second.type
+             if isinstance(first_type, Mixed) or isinstance(second_type, Mixed):
+                 result_dataframe = cls.generate_result_dataframe(first, second)
+         else:
+             if isinstance(first, RecordSet):
+                 first_type = first.structure.components["f"].type
+             else:
+                 first_type = first.type
+             return first_type, result_dataframe
+
+         if isinstance(first_type, Mixed) or isinstance(second_type, Mixed):
+             type_promotion, result_dataframe = binary_implicit_type_promotion_with_mixed_types(result_dataframe, first_type, second_type)
+         else:
+             type_promotion = binary_implicit_type_promotion(first_type, second_type)
+
+         return type_promotion, result_dataframe
+
+     @classmethod
+     def validate(
+             cls, condition: Union[RecordSet, Scalar],
+             then_op: Union[RecordSet, Scalar], else_op: Union[RecordSet, Scalar] = None) -> Union[RecordSet, Scalar]:
+         """
+         Validate an if-then-else expression and return the labelled result operand.
+         """
+         origin = cls.create_origin_expression(condition, then_op, else_op)
+         # check condition
+         cls.check_condition(condition)
+         # check structures
+         rslt_structure, rslt_dataframe = cls.check_structures(condition, then_op, else_op, origin)
+         # check_types (with implicit cast)
+         rslt_type, rslt_dataframe = cls.check_types(then_op, rslt_dataframe, else_op)
+         # Create the result structure with label
+         if isinstance(rslt_structure, Structure):
+             recordset = cls._create_labeled_recordset(origin=origin, rslt_type=rslt_type, rslt_structure=rslt_structure,
+                                                       result_dataframe=rslt_dataframe)
+             return recordset
+         labeled_scalar = cls.create_labeled_scalar(rslt_structure, rslt_type, origin)
+         return labeled_scalar
+
+
+ class Nvl(ConditionalOperator):
+     """
+     Nvl conditional operator (null-value substitution).
+     """
+     op = tokens.NVL
+
+     @classmethod
+     def create_origin_expression(cls, left, right) -> str:
+         left_name = getattr(left, 'name', None) or getattr(left, 'origin')
+         right_name = getattr(right, 'name', None) or getattr(right, 'origin')
+
+         origin = f"{cls.op}({left_name},{right_name})"
+         return origin
+
+     @classmethod
+     def check_structures(cls, left: Union[RecordSet, Scalar], right: Union[RecordSet, Scalar], origin: str):
+         if isinstance(left, RecordSet) and isinstance(right, RecordSet):
+             if cls._check_structures(left, right, origin):
+                 result_dataframe = cls.generate_result_dataframe(left, right)
+                 return left.structure, result_dataframe
+         elif isinstance(left, RecordSet) and isinstance(right, Scalar):
+             return left.structure, left.records
+         elif isinstance(left, Scalar) and isinstance(right, RecordSet):
+             raise SemanticError("4-6-2-1")
+         elif isinstance(left, Scalar) and isinstance(right, Scalar):
+             return left, None
+
+     @classmethod
+     def check_types(cls, first: Union[RecordSet, Scalar], result_dataframe, second: Union[RecordSet, Scalar] = None):
+         """
+         Compute the promoted result type of both operands and update the result dataframe accordingly.
+         """
+         if isinstance(first, RecordSet):
+             first_type = first.structure.components["f"].type
+         else:
+             first_type = first.type
+
+         if isinstance(second, RecordSet):
+             second_type = second.structure.components["f"].type
+         else:
+             second_type = second.type
+
+         if isinstance(first_type, Mixed) or isinstance(second_type, Mixed):
+             type_promotion, result_dataframe = binary_implicit_type_promotion_with_mixed_types(result_dataframe, first_type, second_type)
+         else:
+             type_promotion = binary_implicit_type_promotion(first_type, second_type)
+         if result_dataframe is not None:
+             if 'data_type_left' in result_dataframe.columns:
+                 result_dataframe = result_dataframe.drop(columns=['data_type_left', 'data_type_right'])
+             result_dataframe = result_dataframe.assign(data_type=type_promotion)
+
+         return type_promotion, result_dataframe
+
+     @classmethod
+     def validate(cls, left: Union[RecordSet, Scalar], right: Union[RecordSet, Scalar]) -> Union[RecordSet, Scalar]:
+         """
+         Validate an nvl expression and return the labelled result operand.
+         """
+         origin: str = cls.create_origin_expression(left, right)
+         # check structures
+         rslt_structure, rslt_dataframe = cls.check_structures(left, right, origin)
+         # check_types
+         rslt_type, rslt_dataframe = cls.check_types(first=left, result_dataframe=rslt_dataframe, second=right)
+         # Create the result structure with label
+         if isinstance(rslt_structure, Structure):
+             recordset = cls._create_labeled_recordset(origin=origin, rslt_type=rslt_type, rslt_structure=rslt_structure,
+                                                       result_dataframe=rslt_dataframe)
+             return recordset
+         labeled_scalar = cls.create_labeled_scalar(rslt_structure=rslt_structure, rslt_type=rslt_type, origin=origin)
+         return labeled_scalar
+
+
+ class Filter(ConditionalOperator):
+     op = tokens.FILTER
+     propagate_attributes = False
+
+     @classmethod
+     def create_origin_expression(cls, selection, condition) -> str:
+         selection_name = getattr(selection, 'name', None) or getattr(selection, 'origin', None)
+         condition_name = getattr(condition, 'name', None) or getattr(condition, 'origin', None)
+
+         origin = f"{cls.op} ( {selection_name}, {condition_name} )"
+         return origin
+
+     @classmethod
+     def _check_filter_structures(cls, selection: RecordSet, condition: RecordSet) -> Structure:
+         origin: str = cls.create_origin_expression(selection, condition)
+         if cls._check_same_recordset_structures(selection, condition, origin):
+             return selection.structure
+         else:
+             condition_is_subset = cls.check_condition_is_subset(selection=selection, condition=condition)
+             if condition_is_subset:
+                 return selection.structure
+             raise exceptions.SemanticError("4-6-0-2", operand=selection.name, condition=condition.name)
+
+     @classmethod
+     def validate(cls, selection, condition):
+         if isinstance(selection, RecordSet) and isinstance(condition, RecordSet):
+             if selection.has_only_global_components:
+                 warnings.warn(
+                     f"Performing a filter operation on recordset: {selection.name} which has only global key components")
+
+             check_condition_type = ScalarFactory().scalar_factory("Boolean")
+             condition_fact_component = condition.get_fact_component()
+             error_info = {
+                 'operand_name': condition.name,
+                 'op': cls.op
+             }
+             unary_implicit_type_promotion(condition_fact_component.type, check_condition_type, error_info=error_info)
+             result_structure = cls._check_filter_structures(selection, condition)
+
+             result_dataframe = None
+             if selection.records is not None and condition.records is not None:
+                 result_dataframe = cls.generate_result_dataframe(selection, condition)
+
+             return cls.create_labeled_recordset(selection=selection, condition=condition,
+                                                 result_structure=result_structure, result_dataframe=result_dataframe)
+
+         raise exceptions.SemanticError("4-6-3-1")
+
+     @classmethod
+     def create_labeled_recordset(cls, selection, condition, result_structure, result_dataframe):
+         origin: str = cls.create_origin_expression(selection, condition)
+         recordset = cls._create_labeled_recordset(
+             origin=origin, rslt_type=result_structure.components["f"].type,
+             rslt_structure=result_structure, result_dataframe=result_dataframe
+         )
+         return recordset
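
Editor's note: the three classes above share one entry point, a classmethod validate that receives already-built semantic symbols (RecordSet, Scalar, ConstantOperand from py_dpm.semantics.Symbols) and either returns a labelled result symbol or raises SemanticError. The following is a minimal illustrative sketch of how a caller might dispatch to them; the dispatch table and the validate_conditional helper are hypothetical and not part of the package, and symbol construction is elided because it is not shown in this diff.

# Illustrative sketch only (not part of pydpm_xl): how a visitor over the
# dpm-xl AST might dispatch to the conditional operators defined above.
# The operand symbols are assumed to have been built elsewhere and are
# passed in here as opaque arguments.
from py_dpm.Operators.ConditionalOperators import Filter, IfOperator, Nvl
from py_dpm.Utils import tokens

# Hypothetical mapping from dpm-xl operator tokens to the operator classes.
_CONDITIONAL_DISPATCH = {
    tokens.IF: IfOperator,
    tokens.NVL: Nvl,
    tokens.FILTER: Filter,
}


def validate_conditional(op_token, *operands):
    """Validate a conditional expression and return the labelled result symbol.

    Raises py_dpm.Exceptions.exceptions.SemanticError on structural or type
    mismatches, e.g. "4-6-1-1" for a non-Boolean if-condition or "4-6-3-1"
    when filter is applied to non-recordset operands.
    """
    operator_cls = _CONDITIONAL_DISPATCH[op_token]
    # Expected signatures (from the code above):
    #   IfOperator.validate(condition, then_op, else_op=None)
    #   Nvl.validate(left, right)
    #   Filter.validate(selection, condition)
    return operator_cls.validate(*operands)
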
py_dpm/Operators/NumericOperators.py
@@ -0,0 +1,101 @@
+ import math
+ import operator
+
+ from py_dpm.DataTypes.ScalarTypes import Number
+ from py_dpm.Operators import Operator
+ from py_dpm.Utils import tokens
+
+
+ class Unary(Operator.Unary):
+     op = None
+     type_to_check = Number
+     return_type = None
+     interval_allowed: bool = True
+
+
+ class UnPlus(Unary):
+     op = tokens.PLUS
+     py_op = operator.pos
+
+
+ class UnMinus(Unary):
+     op = tokens.MINUS
+     py_op = operator.neg
+
+
+ class AbsoluteValue(Unary):
+     op = tokens.ABS
+     py_op = operator.abs
+
+
+ class Exponential(Unary):
+     op = tokens.EXP
+     py_op = math.exp
+     return_type = Number
+     interval_allowed: bool = False
+
+
+ class NaturalLogarithm(Unary):
+     op = tokens.LN
+     py_op = math.log
+     return_type = Number
+     interval_allowed: bool = False
+
+
+ class SquareRoot(Unary):
+     op = tokens.SQRT
+     py_op = math.sqrt
+     return_type = Number
+     interval_allowed: bool = False
+
+
+ class NumericBinary(Operator.Binary):
+     type_to_check = Number
+     interval_allowed: bool = True
+
+
+ class BinPlus(NumericBinary):
+     op = tokens.PLUS
+     py_op = operator.add
+
+
+ class BinMinus(NumericBinary):
+     op = tokens.MINUS
+     py_op = operator.sub
+
+
+ class Mult(NumericBinary):
+     op = tokens.MULT
+     py_op = operator.mul
+
+
+ class Div(NumericBinary):
+     op = tokens.DIV
+     py_op = operator.truediv
+     return_type = Number
+
+
+ class Power(NumericBinary):
+     op = tokens.POW
+     py_op = operator.pow
+     interval_allowed: bool = False
+
+
+ class Logarithm(NumericBinary):
+     op = tokens.LOG
+     py_op = math.log
+     return_type = Number
+     interval_allowed: bool = False
+
+
+ class NumericComplex(Operator.Complex):
+     type_to_check = Number
+     interval_allowed: bool = True
+
+
+ class Max(NumericComplex):
+     op = tokens.MAX
+
+
+ class Min(NumericComplex):
+     op = tokens.MIN
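
Editor's note: the numeric operators are declarative. Each subclass only binds a dpm-xl token (op), the Python callable that implements it (py_op), and type metadata (type_to_check, return_type, interval_allowed); the evaluation and type-checking machinery lives in py_dpm.Operators.Operator, which is not part of this excerpt. The sketch below exercises that metadata in isolation; the apply_numeric helper is hypothetical and not an API of the package.

# Illustrative sketch only: apply each operator's bound Python callable to
# plain numbers and report the declared result type, using only the class
# attributes defined above.
from py_dpm.Operators.NumericOperators import BinPlus, Div, Logarithm, SquareRoot


def apply_numeric(operator_cls, *values):
    """Apply an operator's py_op to plain Python numbers and report its metadata."""
    result = operator_cls.py_op(*values)
    declared = getattr(operator_cls, "return_type", None)
    declared_name = declared.__name__ if declared is not None else "inherited"
    print(f"{operator_cls.op}: {values} -> {result} (declared return type: {declared_name})")
    return result


apply_numeric(BinPlus, 2, 3)    # operator.add      -> 5
apply_numeric(Div, 7, 2)        # operator.truediv  -> 3.5, return_type Number
apply_numeric(SquareRoot, 16)   # math.sqrt         -> 4.0, return_type Number
apply_numeric(Logarithm, 8, 2)  # math.log(8, 2)    -> 3.0 (binary logarithm)
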