pydpm_xl 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_dpm/AST/ASTConstructor.py +503 -0
- py_dpm/AST/ASTObjects.py +827 -0
- py_dpm/AST/ASTTemplate.py +101 -0
- py_dpm/AST/ASTVisitor.py +13 -0
- py_dpm/AST/MLGeneration.py +588 -0
- py_dpm/AST/ModuleAnalyzer.py +79 -0
- py_dpm/AST/ModuleDependencies.py +203 -0
- py_dpm/AST/WhereClauseChecker.py +12 -0
- py_dpm/AST/__init__.py +0 -0
- py_dpm/AST/check_operands.py +302 -0
- py_dpm/DataTypes/ScalarTypes.py +324 -0
- py_dpm/DataTypes/TimeClasses.py +370 -0
- py_dpm/DataTypes/TypePromotion.py +195 -0
- py_dpm/DataTypes/__init__.py +0 -0
- py_dpm/Exceptions/__init__.py +0 -0
- py_dpm/Exceptions/exceptions.py +84 -0
- py_dpm/Exceptions/messages.py +114 -0
- py_dpm/OperationScopes/OperationScopeService.py +247 -0
- py_dpm/OperationScopes/__init__.py +0 -0
- py_dpm/Operators/AggregateOperators.py +138 -0
- py_dpm/Operators/BooleanOperators.py +30 -0
- py_dpm/Operators/ClauseOperators.py +159 -0
- py_dpm/Operators/ComparisonOperators.py +69 -0
- py_dpm/Operators/ConditionalOperators.py +362 -0
- py_dpm/Operators/NumericOperators.py +101 -0
- py_dpm/Operators/Operator.py +388 -0
- py_dpm/Operators/StringOperators.py +27 -0
- py_dpm/Operators/TimeOperators.py +53 -0
- py_dpm/Operators/__init__.py +0 -0
- py_dpm/Utils/ValidationsGenerationUtils.py +429 -0
- py_dpm/Utils/__init__.py +0 -0
- py_dpm/Utils/operands_mapping.py +73 -0
- py_dpm/Utils/operator_mapping.py +89 -0
- py_dpm/Utils/tokens.py +172 -0
- py_dpm/Utils/utils.py +2 -0
- py_dpm/ValidationsGeneration/PropertiesConstraintsProcessor.py +190 -0
- py_dpm/ValidationsGeneration/Utils.py +364 -0
- py_dpm/ValidationsGeneration/VariantsProcessor.py +265 -0
- py_dpm/ValidationsGeneration/__init__.py +0 -0
- py_dpm/ValidationsGeneration/auxiliary_functions.py +98 -0
- py_dpm/__init__.py +61 -0
- py_dpm/api/__init__.py +140 -0
- py_dpm/api/ast_generator.py +438 -0
- py_dpm/api/complete_ast.py +241 -0
- py_dpm/api/data_dictionary_validation.py +577 -0
- py_dpm/api/migration.py +77 -0
- py_dpm/api/semantic.py +224 -0
- py_dpm/api/syntax.py +182 -0
- py_dpm/client.py +106 -0
- py_dpm/data_handlers.py +99 -0
- py_dpm/db_utils.py +117 -0
- py_dpm/grammar/__init__.py +0 -0
- py_dpm/grammar/dist/__init__.py +0 -0
- py_dpm/grammar/dist/dpm_xlLexer.interp +428 -0
- py_dpm/grammar/dist/dpm_xlLexer.py +804 -0
- py_dpm/grammar/dist/dpm_xlLexer.tokens +106 -0
- py_dpm/grammar/dist/dpm_xlParser.interp +249 -0
- py_dpm/grammar/dist/dpm_xlParser.py +5224 -0
- py_dpm/grammar/dist/dpm_xlParser.tokens +106 -0
- py_dpm/grammar/dist/dpm_xlParserListener.py +742 -0
- py_dpm/grammar/dist/dpm_xlParserVisitor.py +419 -0
- py_dpm/grammar/dist/listeners.py +10 -0
- py_dpm/grammar/dpm_xlLexer.g4 +435 -0
- py_dpm/grammar/dpm_xlParser.g4 +260 -0
- py_dpm/migration.py +282 -0
- py_dpm/models.py +2139 -0
- py_dpm/semantics/DAG/DAGAnalyzer.py +158 -0
- py_dpm/semantics/DAG/__init__.py +0 -0
- py_dpm/semantics/SemanticAnalyzer.py +320 -0
- py_dpm/semantics/Symbols.py +223 -0
- py_dpm/semantics/__init__.py +0 -0
- py_dpm/utils/__init__.py +0 -0
- py_dpm/utils/ast_serialization.py +481 -0
- py_dpm/views/data_types.sql +12 -0
- py_dpm/views/datapoints.sql +65 -0
- py_dpm/views/hierarchy_operand_reference.sql +11 -0
- py_dpm/views/hierarchy_preconditions.sql +13 -0
- py_dpm/views/hierarchy_variables.sql +26 -0
- py_dpm/views/hierarchy_variables_context.sql +14 -0
- py_dpm/views/key_components.sql +18 -0
- py_dpm/views/module_from_table.sql +11 -0
- py_dpm/views/open_keys.sql +13 -0
- py_dpm/views/operation_info.sql +27 -0
- py_dpm/views/operation_list.sql +18 -0
- py_dpm/views/operations_versions_from_module_version.sql +30 -0
- py_dpm/views/precondition_info.sql +17 -0
- py_dpm/views/report_type_operand_reference_info.sql +18 -0
- py_dpm/views/subcategory_info.sql +17 -0
- py_dpm/views/table_info.sql +19 -0
- pydpm_xl-0.1.10.dist-info/LICENSE +674 -0
- pydpm_xl-0.1.10.dist-info/METADATA +50 -0
- pydpm_xl-0.1.10.dist-info/RECORD +94 -0
- pydpm_xl-0.1.10.dist-info/WHEEL +4 -0
- pydpm_xl-0.1.10.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import warnings
|
|
2
|
+
|
|
3
|
+
import pandas as pd
|
|
4
|
+
|
|
5
|
+
from py_dpm.DataTypes.ScalarTypes import Integer, Number, ScalarFactory
|
|
6
|
+
from py_dpm.DataTypes.TypePromotion import unary_implicit_type_promotion
|
|
7
|
+
from py_dpm.Exceptions import exceptions
|
|
8
|
+
from py_dpm.Operators import Operator
|
|
9
|
+
from py_dpm.Utils import tokens
|
|
10
|
+
from py_dpm.semantics.Symbols import RecordSet
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AggregateOperator(Operator.Unary):
    """
    Aggregate operators involve all operators with a Recordset and a Grouping clause.

    The grouping clause components must be present in the operand recordset.
    """
    # Aggregates accept interval-typed facts (consumed by unary_implicit_type_promotion).
    interval_allowed: bool = True

    @staticmethod
    def check_grouping(grouping_clause, key_components):
        """Raise SemanticError 4-4-0-2 if any grouping component is not a key component.

        A falsy (None/empty) grouping_clause is accepted without checks.
        """
        if grouping_clause and not all(item in key_components for item in grouping_clause):
            not_present = [item for item in grouping_clause if item not in key_components]
            raise exceptions.SemanticError("4-4-0-2", not_present=not_present)

    @staticmethod
    def format_structure_with_grouping(operand, grouping_clause):
        """Drop every structure component not in the grouping clause.

        The 'refPeriod' component is always preserved.
        NOTE(review): this mutates operand.structure in place and returns the
        same object — callers relying on the original structure afterwards
        should be aware.
        """
        structure = operand.structure

        components_to_delete = []

        for component in structure.components:
            if component not in grouping_clause and component != 'refPeriod':
                components_to_delete.append(component)

        # Deletion happens after iteration to avoid mutating the dict while iterating.
        for item in components_to_delete:
            del structure.components[item]

        return structure

    @staticmethod
    def manage_records(records: pd.DataFrame, grouping_clause: list):
        """Reduce the records DataFrame to the grouping columns plus 'data_type'.

        Returns None when records is None or when only 'data_type' remains
        (i.e. no grouping column was present). Duplicate rows (compared as
        strings) are dropped, keeping the first occurrence.
        NOTE(review): columns are deleted from the passed DataFrame in place.
        """
        if records is None:
            return records
        columns_to_preserve = ['data_type']
        for item in grouping_clause:
            columns_to_preserve.append(item)

        columns_to_delete = [item for item in records.columns if item not in columns_to_preserve]

        for col in columns_to_delete:
            del records[col]

        if len(records.columns) == 1 and records.columns[0] == 'data_type':
            return None

        # astype(str) makes rows comparable even when cells hold unhashable values.
        records = records.loc[records.astype(str).drop_duplicates(keep='first').index].reset_index(drop=True)

        return records

    @classmethod
    def create_grouped_recordset(cls, operand: RecordSet, grouping_clause, final_type):
        """Build the result RecordSet for an aggregation with a group-by clause.

        Delegates the labeled-recordset construction to the inherited
        _create_labeled_recordset helper (defined on the Operator base).
        """
        # Creating new structure with only the grouped components
        rslt_structure = cls.format_structure_with_grouping(operand, grouping_clause)
        origin = f"{cls.op}({operand.name} group by {', '.join(grouping_clause)})"
        result_dataframe = cls.manage_records(operand.records, grouping_clause) if operand.records is not None else None
        recordset = cls._create_labeled_recordset(origin=origin, rslt_type=final_type, rslt_structure=rslt_structure,
                                                  result_dataframe=result_dataframe)
        return recordset

    @classmethod
    def validate(cls, operand, grouping_clause):
        """Semantically validate the aggregate and produce its result symbol.

        Returns a labeled scalar when grouping_clause is None, otherwise a
        grouped RecordSet. Warns when grouping by every key component (the
        grouping is then a no-op). Raises SemanticError on type or grouping
        violations.
        """
        cls.check_operator_well_defined()
        return_type = None if not cls.return_type else ScalarFactory().scalar_factory(cls.return_type.__name__)
        op_type_to_check = None if not cls.type_to_check else ScalarFactory().scalar_factory(cls.type_to_check.__name__)

        error_info = {
            'operand_name': operand.name,
            'op': cls.op
        }
        # "f" is the fact component of the recordset structure.
        fact_component_type = operand.structure.components["f"].type

        final_type = unary_implicit_type_promotion(
            fact_component_type, op_type_to_check, return_type=return_type, interval_allowed=cls.interval_allowed, error_info=error_info)
        if operand.records is not None:
            # Tag every record with the promoted result type.
            operand.records['data_type'] = final_type

        if grouping_clause is None:
            return cls.create_labeled_scalar(operand, final_type)

        key_components = operand.get_key_components_names()
        cls.check_grouping(grouping_clause, key_components)
        if len(grouping_clause) == len(key_components):
            warnings.warn(f"Grouping by all the key components of the Recordset: {','.join(key_components)}")

        return cls.create_grouped_recordset(operand, grouping_clause, final_type)

    @classmethod
    def generate_origin_expression(cls, operand, group_by=None):
        """Build the textual origin expression, e.g. ``op(name group by a, b)``.

        Falls back from the operand's name to its origin when name is unset.
        """
        operand_name = getattr(operand, 'name', None) or getattr(operand, 'origin', None)
        if group_by:
            return f"{cls.op}({operand_name} group by {group_by})"
        else:
            return f"{cls.op}({operand_name})"
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class MaxAggr(AggregateOperator):
    """Maximum aggregation operator."""
    op = tokens.MAX_AGGR
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class MinAggr(AggregateOperator):
    """Minimum aggregation operator."""
    op = tokens.MIN_AGGR
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class Sum(AggregateOperator):
    """Sum aggregation operator; operands are checked against Number."""
    op = tokens.SUM
    type_to_check = Number
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class Count(AggregateOperator):
    """Count aggregation operator.

    No operand type check (type_to_check is None); the result is Integer.
    """
    op = tokens.COUNT
    type_to_check = None
    return_type = Integer
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class Avg(AggregateOperator):
    """Average aggregation operator; numeric operands, Number result."""
    op = tokens.AVG
    type_to_check = Number
    return_type = Number
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
class Median(AggregateOperator):
    """Median aggregation operator; numeric operands, Number result."""
    op = tokens.MEDIAN
    type_to_check = Number
    return_type = Number
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import operator
|
|
2
|
+
|
|
3
|
+
from py_dpm.DataTypes.ScalarTypes import Boolean
|
|
4
|
+
from py_dpm.Operators import Operator
|
|
5
|
+
from py_dpm.Utils import tokens
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class Binary(Operator.Binary):
    """Base class for binary boolean operators; operands are checked against Boolean."""
    type_to_check = Boolean
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class And(Binary):
    """Logical AND.

    operator.and_ is the bitwise ``&``; on bools (and elementwise on pandas
    Series) this yields the logical conjunction.
    """
    op = tokens.AND
    py_op = operator.and_
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class Or(Binary):
    """Logical OR (bitwise ``|`` under the hood, logical on booleans)."""
    op = tokens.OR
    py_op = operator.or_
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class Xor(Binary):
    """Logical exclusive OR (bitwise ``^`` under the hood, logical on booleans)."""
    op = tokens.XOR
    py_op = operator.xor
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Not(Operator.Unary):
    """Logical negation; operand is checked against Boolean."""
    type_to_check = Boolean
    op = tokens.NOT
    py_op = operator.not_
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
from py_dpm.DataTypes.ScalarTypes import ScalarFactory
|
|
4
|
+
from py_dpm.DataTypes.TypePromotion import unary_implicit_type_promotion
|
|
5
|
+
from py_dpm.Exceptions import exceptions
|
|
6
|
+
from py_dpm.Operators.ConditionalOperators import ConditionalOperator
|
|
7
|
+
from py_dpm.Operators.Operator import Binary, Operator
|
|
8
|
+
from py_dpm.Utils import tokens
|
|
9
|
+
from py_dpm.Utils.operands_mapping import generate_new_label, set_operand_label
|
|
10
|
+
from py_dpm.semantics.Symbols import FactComponent, RecordSet
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class ClauseOperator(Operator):
    """Base class for recordset clause operators (where / rename / get).

    Subclasses set ``op`` and override generate_origin_expression; validation
    and result construction are shared here.
    """
    op = None
    # Whether new names require extra validation (not consulted in this module).
    check_new_names = False
    # Whether this clause acts as a precondition (not consulted in this module).
    precondition = False
    # When False, attribute components are stripped from the result structure.
    propagate_attributes = True

    @classmethod
    def validate(cls, operand, key_names, new_names=None, condition=None):
        """Validate the clause against the operand RecordSet and build the result.

        Raises SemanticError when the operand is not a RecordSet, when key_names
        touch standard components or the fact, when referenced components are
        missing, or (for rename) when new names are duplicated, already exist,
        or collide with reserved component names.
        """
        if not isinstance(operand, RecordSet):
            raise exceptions.SemanticError("4-5-0-2", operator=cls.op)

        if condition:
            cls._validate_condition(operand, condition)

        if any(x in operand.get_standard_components() for x in key_names) or tokens.FACT in key_names:
            raise exceptions.SemanticError("4-5-0-1", recordset=operand.name)

        # DPM components plus attributes form the namespace key_names may reference.
        dpm_components = {**operand.get_dpm_components(), **operand.get_attributes()}

        not_found = [name for name in key_names if name not in dpm_components]
        if not_found:
            raise exceptions.SemanticError("2-8", op=cls.op, dpm_keys=not_found, recordset=operand.name)

        if cls.op == tokens.RENAME:
            if len(new_names) > len(set(new_names)):
                # seen.add(x) returns None, so the condition collects repeats only.
                seen = set()
                duplicated = list(set(x for x in new_names if x in seen or seen.add(x)))
                raise exceptions.SemanticError("4-5-1-2", duplicated=duplicated)
            existing_components = [name for name in new_names if name in dpm_components]
            if existing_components:
                raise exceptions.SemanticError("4-5-1-1", names=existing_components, recordset=operand.name)

            for name, new_name in list(zip(key_names, new_names)):
                # Reserved structural names cannot be used as rename targets.
                if new_name in (
                        tokens.ROW, tokens.COLUMN, tokens.SHEET, tokens.FACT, tokens.INDEX_X, tokens.INDEX_Y,
                        tokens.INDEX_Z):
                    raise exceptions.SemanticError("4-5-1-3", recordset=operand.name)
                elif name not in operand.structure.components:
                    raise exceptions.SemanticError("4-5-1-4", component=name, recordset=operand.name)
                cls.rename_component(operand=operand, name=name, new_name=new_name)

        if cls.op == tokens.WHERE:
            origin = cls.generate_origin_expression(operand, condition)
        elif cls.op == tokens.RENAME:
            origin = cls.generate_origin_expression(operand, key_names, new_names)
        else:
            origin = cls.generate_origin_expression(operand, key_names[0])

        return cls.generate_result_structure(operand, key_names, condition, origin)

    @classmethod
    def _validate_condition(cls, operand: RecordSet, condition):
        """Check the condition is Boolean-compatible; for RecordSet conditions
        also check structural compatibility with the operand."""
        boolean_type = ScalarFactory().scalar_factory('Boolean')
        if isinstance(condition, RecordSet):
            fact_component = condition.get_fact_component()
            unary_implicit_type_promotion(fact_component.type, boolean_type)
            cls._check_structures(operand, condition)
        else:
            unary_implicit_type_promotion(condition.type, boolean_type)

    @classmethod
    def _check_structures(cls, operand: RecordSet, condition: RecordSet):
        """Check that the condition recordset is structurally compatible.

        Same key-component count: require identical components. Otherwise the
        condition's keys must be a subset of the operand's.
        NOTE(review): the left side reads key components from the structure and
        the right side from the RecordSet itself — confirm both accessors are
        equivalent.
        """
        operand_structure = operand.structure
        condition_structure = condition.structure
        if len(operand_structure.get_key_components()) == len(condition.get_key_components()):
            origin = f"{operand.origin}[where {condition.origin}]"
            # For better error management
            class_check = Binary()
            class_check.op = cls.op
            class_check.check_same_components(operand_structure, condition_structure, origin)
        else:
            is_subset = ConditionalOperator.check_condition_is_subset(operand, condition)
            if not is_subset:
                raise exceptions.SemanticError("4-5-2-2", operand=operand.name, condition=condition.name)

    @classmethod
    def rename_component(cls, operand: RecordSet, name: str, new_name: str):
        """Rename a structure component in place (keeps the component object)."""
        component = operand.structure.components[name]
        del operand.structure.components[name]
        component.name = new_name
        operand.structure.components[new_name] = component

    @classmethod
    def generate_result_structure(cls, operand: RecordSet, key_names: List[str], condition, origin):
        """Build the result RecordSet for the clause.

        For GET, the selected component replaces the fact component. The
        operand's structure is mutated in place and reused for the result.
        """
        new_label = generate_new_label()
        operand.structure.replace_components_parent(new_label)

        if cls.op == tokens.GET:
            selected_component = key_names[0]
            component = operand.structure.components[selected_component]
            del operand.structure.components[tokens.FACT]
            fact_component = FactComponent(type_=component.type, parent=component.parent)
            operand.structure.components[tokens.FACT] = fact_component

        if not cls.propagate_attributes:
            operand.structure.remove_attributes()

        result = RecordSet(structure=operand.structure, name=new_label, origin=origin)
        if condition and isinstance(condition, RecordSet):
            # A recordset condition filters the records.
            result_dataframe = ConditionalOperator.generate_result_dataframe(operand, condition)
            result.records = result_dataframe
        else:
            result.records = operand.records
        set_operand_label(result.name, result.origin)
        return result

    @classmethod
    def generate_origin_expression(cls, *args) -> str:
        """Abstract hook: subclasses build their textual origin expression."""
        pass
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
class Where(ClauseOperator):
    """Where clause operator: filters a recordset by a boolean condition."""

    op = tokens.WHERE

    @classmethod
    def validate_condition_type(cls, condition):
        """Check that the condition's type can be promoted to Boolean."""
        target_type = ScalarFactory().scalar_factory('Boolean')
        context = {
            'operand_name': condition.name,
            'op': cls.op,
        }
        unary_implicit_type_promotion(condition.type, target_type, error_info=context)

    @classmethod
    def generate_origin_expression(cls, operand, condition):
        """Build the textual origin expression for a where clause."""
        def _label(obj):
            # Prefer the symbol's name; fall back to its origin expression.
            return getattr(obj, 'name', None) or getattr(obj, 'origin', None)

        return f"{_label(operand)}[ where {_label(condition)}]"
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class Rename(ClauseOperator):
    """Rename clause operator: renames key components of a recordset."""

    op = tokens.RENAME

    @classmethod
    def generate_origin_expression(cls, operand, old_names, new_names):
        """Build the textual origin expression for a rename clause.

        Example: ``rs [ rename a to b, c to d]``.
        """
        # Pair old and new names positionally with zip instead of indexing
        # via range(len(...)); validate() zips the same lists, so lengths match.
        origin_nodes = [f"{old} to {new}" for old, new in zip(old_names, new_names)]
        return f"{operand.name} [ rename " + ', '.join(origin_nodes) + ']'
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
class Get(ClauseOperator):
    """Get clause operator: projects one component into the fact position.

    Attribute components are not propagated to the result
    (propagate_attributes = False).
    """
    op = tokens.GET
    propagate_attributes = False

    @classmethod
    def generate_origin_expression(cls, operand, component) -> str:
        """Build the textual origin expression for a get clause."""
        return f"{operand.name} [ get {component} ]"
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import operator
|
|
2
|
+
import re
|
|
3
|
+
|
|
4
|
+
from py_dpm.DataTypes.ScalarTypes import Boolean, String
|
|
5
|
+
from py_dpm.Operators import Operator
|
|
6
|
+
from py_dpm.Utils import tokens
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class IsNull(Operator.Unary):
    """Null-check operator; result type is Boolean.

    NOTE(review): py_op is operator.truth, which returns the *truthiness* of
    its argument rather than a null test — confirm how the framework actually
    applies py_op for this operator.
    """
    op = tokens.ISNULL
    py_op = operator.truth
    # Skip operand checks against the declared return type.
    do_not_check_with_return_type = True
    return_type = Boolean
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Binary(Operator.Binary):
    """Base class for binary comparison operators.

    Comparisons always yield Boolean; operands are not checked against the
    return type (do_not_check_with_return_type).
    """
    do_not_check_with_return_type = True
    return_type = Boolean
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class Equal(Binary):
    """Equality comparison (``==``)."""
    op = tokens.EQ
    py_op = operator.eq
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class NotEqual(Binary):
    """Inequality comparison (``!=``)."""
    op = tokens.NEQ
    py_op = operator.ne
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Greater(Binary):
    """Greater-than comparison (``>``)."""
    op = tokens.GT
    py_op = operator.gt
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class GreaterEqual(Binary):
    """Greater-than-or-equal comparison (``>=``)."""
    op = tokens.GTE
    py_op = operator.ge
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class Less(Binary):
    """Less-than comparison (``<``)."""
    op = tokens.LT
    py_op = operator.lt
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class LessEqual(Binary):
    """Less-than-or-equal comparison (``<=``)."""
    op = tokens.LTE
    py_op = operator.le
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class In(Binary):
    """Membership comparison operator (``in``)."""

    op = tokens.IN

    @classmethod
    def py_op(cls, x, y):
        """Return True if x is contained in the collection y.

        operator.contains expects (container, item), so the arguments are
        swapped relative to the operator's (item, container) calling order.
        """
        return operator.contains(y, x)
    # Removed the dead self-assignment ``py_op = py_op`` that followed the
    # classmethod definition: it rebound the name to the same object.
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class Match(Binary):
    """Regular-expression match operator; both operands checked against String."""

    op = tokens.MATCH
    type_to_check = String

    @classmethod
    def py_op(cls, x, y):
        """Return True if the whole string x matches the regex pattern y.

        re.fullmatch anchors the pattern to the entire string; the bool()
        converts the Match-or-None result.
        """
        return bool(re.fullmatch(y, x))
    # Removed the dead self-assignment ``py_op = py_op`` that followed the
    # classmethod definition: it rebound the name to the same object.
|