pydpm_xl 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_dpm/AST/ASTConstructor.py +503 -0
- py_dpm/AST/ASTObjects.py +827 -0
- py_dpm/AST/ASTTemplate.py +101 -0
- py_dpm/AST/ASTVisitor.py +13 -0
- py_dpm/AST/MLGeneration.py +588 -0
- py_dpm/AST/ModuleAnalyzer.py +79 -0
- py_dpm/AST/ModuleDependencies.py +203 -0
- py_dpm/AST/WhereClauseChecker.py +12 -0
- py_dpm/AST/__init__.py +0 -0
- py_dpm/AST/check_operands.py +302 -0
- py_dpm/DataTypes/ScalarTypes.py +324 -0
- py_dpm/DataTypes/TimeClasses.py +370 -0
- py_dpm/DataTypes/TypePromotion.py +195 -0
- py_dpm/DataTypes/__init__.py +0 -0
- py_dpm/Exceptions/__init__.py +0 -0
- py_dpm/Exceptions/exceptions.py +84 -0
- py_dpm/Exceptions/messages.py +114 -0
- py_dpm/OperationScopes/OperationScopeService.py +247 -0
- py_dpm/OperationScopes/__init__.py +0 -0
- py_dpm/Operators/AggregateOperators.py +138 -0
- py_dpm/Operators/BooleanOperators.py +30 -0
- py_dpm/Operators/ClauseOperators.py +159 -0
- py_dpm/Operators/ComparisonOperators.py +69 -0
- py_dpm/Operators/ConditionalOperators.py +362 -0
- py_dpm/Operators/NumericOperators.py +101 -0
- py_dpm/Operators/Operator.py +388 -0
- py_dpm/Operators/StringOperators.py +27 -0
- py_dpm/Operators/TimeOperators.py +53 -0
- py_dpm/Operators/__init__.py +0 -0
- py_dpm/Utils/ValidationsGenerationUtils.py +429 -0
- py_dpm/Utils/__init__.py +0 -0
- py_dpm/Utils/operands_mapping.py +73 -0
- py_dpm/Utils/operator_mapping.py +89 -0
- py_dpm/Utils/tokens.py +172 -0
- py_dpm/Utils/utils.py +2 -0
- py_dpm/ValidationsGeneration/PropertiesConstraintsProcessor.py +190 -0
- py_dpm/ValidationsGeneration/Utils.py +364 -0
- py_dpm/ValidationsGeneration/VariantsProcessor.py +265 -0
- py_dpm/ValidationsGeneration/__init__.py +0 -0
- py_dpm/ValidationsGeneration/auxiliary_functions.py +98 -0
- py_dpm/__init__.py +61 -0
- py_dpm/api/__init__.py +140 -0
- py_dpm/api/ast_generator.py +438 -0
- py_dpm/api/complete_ast.py +241 -0
- py_dpm/api/data_dictionary_validation.py +577 -0
- py_dpm/api/migration.py +77 -0
- py_dpm/api/semantic.py +224 -0
- py_dpm/api/syntax.py +182 -0
- py_dpm/client.py +106 -0
- py_dpm/data_handlers.py +99 -0
- py_dpm/db_utils.py +117 -0
- py_dpm/grammar/__init__.py +0 -0
- py_dpm/grammar/dist/__init__.py +0 -0
- py_dpm/grammar/dist/dpm_xlLexer.interp +428 -0
- py_dpm/grammar/dist/dpm_xlLexer.py +804 -0
- py_dpm/grammar/dist/dpm_xlLexer.tokens +106 -0
- py_dpm/grammar/dist/dpm_xlParser.interp +249 -0
- py_dpm/grammar/dist/dpm_xlParser.py +5224 -0
- py_dpm/grammar/dist/dpm_xlParser.tokens +106 -0
- py_dpm/grammar/dist/dpm_xlParserListener.py +742 -0
- py_dpm/grammar/dist/dpm_xlParserVisitor.py +419 -0
- py_dpm/grammar/dist/listeners.py +10 -0
- py_dpm/grammar/dpm_xlLexer.g4 +435 -0
- py_dpm/grammar/dpm_xlParser.g4 +260 -0
- py_dpm/migration.py +282 -0
- py_dpm/models.py +2139 -0
- py_dpm/semantics/DAG/DAGAnalyzer.py +158 -0
- py_dpm/semantics/DAG/__init__.py +0 -0
- py_dpm/semantics/SemanticAnalyzer.py +320 -0
- py_dpm/semantics/Symbols.py +223 -0
- py_dpm/semantics/__init__.py +0 -0
- py_dpm/utils/__init__.py +0 -0
- py_dpm/utils/ast_serialization.py +481 -0
- py_dpm/views/data_types.sql +12 -0
- py_dpm/views/datapoints.sql +65 -0
- py_dpm/views/hierarchy_operand_reference.sql +11 -0
- py_dpm/views/hierarchy_preconditions.sql +13 -0
- py_dpm/views/hierarchy_variables.sql +26 -0
- py_dpm/views/hierarchy_variables_context.sql +14 -0
- py_dpm/views/key_components.sql +18 -0
- py_dpm/views/module_from_table.sql +11 -0
- py_dpm/views/open_keys.sql +13 -0
- py_dpm/views/operation_info.sql +27 -0
- py_dpm/views/operation_list.sql +18 -0
- py_dpm/views/operations_versions_from_module_version.sql +30 -0
- py_dpm/views/precondition_info.sql +17 -0
- py_dpm/views/report_type_operand_reference_info.sql +18 -0
- py_dpm/views/subcategory_info.sql +17 -0
- py_dpm/views/table_info.sql +19 -0
- pydpm_xl-0.1.10.dist-info/LICENSE +674 -0
- pydpm_xl-0.1.10.dist-info/METADATA +50 -0
- pydpm_xl-0.1.10.dist-info/RECORD +94 -0
- pydpm_xl-0.1.10.dist-info/WHEEL +4 -0
- pydpm_xl-0.1.10.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
import warnings
|
|
2
|
+
|
|
3
|
+
from py_dpm.DataTypes.ScalarTypes import Boolean, Date, Duration, Integer, Item, Mixed, Null, Number, \
|
|
4
|
+
ScalarType, String, Subcategory, TimeInterval, TimePeriod
|
|
5
|
+
from py_dpm.Exceptions.exceptions import SemanticError
|
|
6
|
+
|
|
7
|
+
# Implicit type-promotion lattice: maps each scalar type class to the set of
# type classes it may be implicitly promoted to. Every type promotes to
# String; Null promotes to every type (including itself).
implicit_type_promotion_dict = {
    String: {String},
    Number: {String, Number},
    Integer: {String, Number, Integer},
    TimeInterval: {String, TimeInterval},
    Date: {String, TimeInterval, Date},
    TimePeriod: {String, TimeInterval, TimePeriod},
    Duration: {String, Duration},
    Boolean: {String, Boolean},
    Item: {String, Item},
    Subcategory: {String, Subcategory},
    Null: {String, Number, Integer, TimeInterval, Date, TimePeriod, Duration, Boolean, Item, Subcategory, Null}
}
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def binary_implicit_type_promotion(
        left: ScalarType, right: ScalarType, op_type_to_check: ScalarType = None, return_type: ScalarType = None,
        interval_allowed: bool = True, error_info: dict = None):
    """
    Resolve the implicit type promotion between the two operand types of a
    binary operator.

    :param left: scalar type of the left operand
    :param right: scalar type of the right operand
    :param op_type_to_check: type the operator is declared to work on, if any
    :param return_type: forced result type of the operator, if any
    :param interval_allowed: whether operands carrying an interval are accepted
    :param error_info: optional dict with 'op', 'left_name' and 'right_name'
        keys used to build the error origin text
    :return: the promoted ScalarType of the result
    :raises SemanticError: "3-2" when op_type_to_check is not reachable from
        both operand types; "3-1" when no implicit cast exists between them
    """
    left_implicities = implicit_type_promotion_dict[left.__class__]
    right_implicities = implicit_type_promotion_dict[right.__class__]

    # Warn only when both types are present and neither is a subtype of the
    # other, i.e. a real implicit conversion is taking place.
    if left and right:
        warning_raising = not (isinstance(left, type(right)) or isinstance(right, type(left)))
    else:
        warning_raising = False

    if op_type_to_check:

        if op_type_to_check.is_included(
                left_implicities.intersection(right_implicities)):  # general case and date->str and boolean-> str in the str operator

            if warning_raising:
                warnings.warn(
                    f"Implicit promotion between {left} and {right} and op_type={op_type_to_check}.")
            if return_type:
                binary_check_interval(result_operand=return_type, left_operand=left, right_operand=right, op_type_to_check=op_type_to_check,
                                      return_type=return_type, interval_allowed=interval_allowed, error_info=error_info)
                return return_type

            # Prefer the more specific operand type as the result; a Null
            # operand never wins the promotion.
            if not left.is_null_type() and left.is_included(right_implicities):
                binary_check_interval(result_operand=left, left_operand=left, right_operand=right, op_type_to_check=op_type_to_check,
                                      return_type=return_type, interval_allowed=interval_allowed, error_info=error_info)
                return left
            elif not right.is_null_type() and right.is_included(left_implicities):
                binary_check_interval(result_operand=right, left_operand=left, right_operand=right, op_type_to_check=op_type_to_check,
                                      return_type=return_type, interval_allowed=interval_allowed, error_info=error_info)
                return right
            else:
                # Neither operand subsumes the other: fall back to the
                # operator's own working type.
                if isinstance(op_type_to_check, Number):
                    binary_check_interval(result_operand=op_type_to_check, left_operand=left, right_operand=right,
                                          op_type_to_check=op_type_to_check, return_type=return_type, interval_allowed=interval_allowed,
                                          error_info=error_info)
                return op_type_to_check

        else:
            origin = None if error_info is None else "operator={operator} {left} {right}".format(left=error_info['left_name'],
                                                                                                 operator=error_info['op'],
                                                                                                 right=error_info['right_name'])
            raise SemanticError("3-2", type_1=left, type_2=right, type_op=op_type_to_check, origin=origin)
    else:
        if warning_raising:
            warnings.warn(f"Implicit promotion between {left} and {right}.")
        if return_type and (left.is_included(right_implicities) or right.is_included(left_implicities)):
            return return_type
        elif left.is_included(right_implicities):
            return left
        elif right.is_included(left_implicities):
            return right
        else:
            origin = None if error_info is None else "operator={operator} {left} {right}".format(left=error_info['left_name'],
                                                                                                 operator=error_info['op'],
                                                                                                 right=error_info['right_name'])
            raise SemanticError("3-1", type_1=left, type_2=right, origin=origin)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def binary_implicit_type_promotion_with_mixed_types(
        result_dataframe, left_type, right_type, op_type_to_check=None, return_type=None, interval_allowed: bool = False, error_info=None):
    """Resolve implicit type promotion row by row when either operand of a
    binary operator carries mixed types.

    Returns a (overall_type, dataframe) pair; the overall type is
    *return_type* when supplied, otherwise Mixed.
    """
    if result_dataframe.empty:
        return Mixed(), result_dataframe

    def _promote(lhs, rhs):
        # Delegate each row to the scalar binary-promotion rules.
        return binary_implicit_type_promotion(lhs, rhs, op_type_to_check,
                                              return_type, interval_allowed, error_info)

    if 'data_type' not in result_dataframe.columns:
        # No 'data_type' column means the dataframe comes from merging two
        # recordsets, each side contributing its own type column.
        result_dataframe['data_type'] = result_dataframe.apply(
            lambda row: _promote(row['data_type_left'], row['data_type_right']), axis=1)
        result_dataframe = result_dataframe.drop(columns=['data_type_left', 'data_type_right'])
    elif isinstance(left_type, Mixed):
        result_dataframe['data_type'] = result_dataframe['data_type'].apply(
            lambda cell: _promote(cell, right_type))
    elif isinstance(right_type, Mixed):
        result_dataframe['data_type'] = result_dataframe['data_type'].apply(
            lambda cell: _promote(left_type, cell))

    overall_type = return_type if return_type else Mixed()
    return overall_type, result_dataframe
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def unary_implicit_type_promotion(operand: ScalarType, op_type_to_check: ScalarType = None, return_type: ScalarType = None,
                                  interval_allowed: bool = True, error_info: dict = None):
    """Resolve the implicit promotion of a single operand for a unary operator.

    :param operand: scalar type of the operand
    :param op_type_to_check: type the operator is declared to work on, if any
    :param return_type: forced result type of the operator, if any
    :param interval_allowed: whether an operand carrying an interval is accepted
    :param error_info: optional dict with 'op' and 'operand_name' keys
    :raises SemanticError: "3-3" when op_type_to_check cannot be reached from
        the operand's type
    """
    allowed_targets = implicit_type_promotion_dict[operand.__class__]

    # Validate (and propagate) interval information before type checks.
    unary_check_interval(operand=operand, op_type_to_check=op_type_to_check, return_type=return_type, interval_allowed=interval_allowed,
                         error_info=error_info)

    if op_type_to_check and not op_type_to_check.is_included(allowed_targets):
        origin = None if error_info is None else "{}({})".format(error_info['op'], error_info['operand_name'])
        raise SemanticError("3-3", type_1=operand, type_op=op_type_to_check, origin=origin)

    if return_type:
        return return_type
    if op_type_to_check and not operand.is_subtype(op_type_to_check):
        return op_type_to_check
    return operand
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def unary_implicit_type_promotion_with_mixed_types(operand_dataframe, op_type_to_check=None, return_type=None, interval_allowed=None,
                                                   error_info=None):
    """Apply the unary implicit-promotion rules row by row to a mixed-type
    recordset.

    Returns a (overall_type, dataframe) pair; the overall type is
    *return_type* when supplied, otherwise Mixed.
    """
    if operand_dataframe.empty:
        return Mixed(), operand_dataframe

    def _promote_cell(cell_type):
        # Each row's data_type is promoted independently.
        return unary_implicit_type_promotion(cell_type, op_type_to_check, return_type,
                                             interval_allowed=interval_allowed, error_info=error_info)

    operand_dataframe['data_type'] = operand_dataframe['data_type'].apply(_promote_cell)

    overall_type = return_type if return_type else Mixed()
    return overall_type, operand_dataframe
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def check_operator(return_type: ScalarType = None, op_check_type: ScalarType = None):
    """Check whether *return_type* is reachable from *op_check_type* by
    implicit promotion.

    :param return_type: type an operator is expected to return (None skips the check)
    :param op_check_type: type the operator is declared to work on (None skips the check)
    :return: True when either side is unspecified or the promotion is allowed
    """
    if return_type is None or op_check_type is None:
        return True

    op_check_type_implicities = implicit_type_promotion_dict[op_check_type.__class__]

    # Collapsed the redundant `if ...: return True / return False` into a
    # direct boolean return; behavior is unchanged.
    return return_type.is_included(op_check_type_implicities)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def unary_check_interval(operand: ScalarType, op_type_to_check: ScalarType = None, return_type: ScalarType = None,
                         interval_allowed: bool = False, error_info: dict = None):
    """
    Validate interval usage for a unary operator and propagate the operand's
    interval onto the numeric check/return types.

    :param operand: scalar type of the operand (its ``interval`` attribute is inspected)
    :param op_type_to_check: type the operator works on; receives the interval when numeric
    :param return_type: forced result type; receives the interval when numeric (not Integer)
    :param interval_allowed: whether the operator accepts interval-carrying operands
    :param error_info: optional dict with 'op' and 'operand_name' keys for the error origin
    :raises SemanticError: "3-5" when the operand carries an interval but the
        operator does not allow intervals
    """
    if interval_allowed and getattr(operand, "interval", None):
        # Integer results discard the interval entirely.
        # NOTE(review): checked before Number on purpose — if Integer is a
        # subclass of Number, swapping the order would change behavior.
        if return_type and isinstance(return_type, Integer):
            return None
        if return_type and isinstance(return_type, Number):
            return_type.set_interval(operand.interval)
        if op_type_to_check and isinstance(op_type_to_check, Number):
            op_type_to_check.set_interval(operand.interval)
    elif not interval_allowed and getattr(operand, "interval", None):
        origin = None if error_info is None else "{}({})".format(error_info['op'], error_info['operand_name'])
        raise SemanticError("3-5", origin=origin)
    else:
        # Operand carries no interval: nothing to validate or propagate.
        return None
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def binary_check_interval(result_operand: ScalarType = None, left_operand: ScalarType = None, right_operand: ScalarType = None,
                          op_type_to_check: ScalarType = None, return_type: ScalarType = None, interval_allowed: bool = False,
                          error_info: dict = None):
    """
    Validate interval usage for a binary operator and propagate the operands'
    interval onto the numeric result type.

    :param result_operand: type that will carry the resulting interval
    :param left_operand: scalar type of the left operand
    :param right_operand: scalar type of the right operand
    :param op_type_to_check: type the operator works on; None disables the check
    :param return_type: forced result type; Integer results drop the interval
    :param interval_allowed: whether the operator accepts interval-carrying operands
    :param error_info: optional dict describing the operation for error messages
    :raises SemanticError: "3-5" when an operand carries an interval but the
        operator does not allow intervals
    """
    if op_type_to_check is None:
        return None
    if return_type and isinstance(return_type, Integer):
        return None
    if isinstance(result_operand, Number):
        interval = getattr(left_operand, "interval", None) or getattr(right_operand, "interval", None)
        if interval and not interval_allowed:
            # BUG FIX: binary callers (binary_implicit_type_promotion) build
            # error_info with 'left_name'/'op'/'right_name', so indexing
            # 'operand_name' unconditionally raised KeyError instead of the
            # intended SemanticError. Build the origin from whichever keys
            # are present.
            if error_info is None:
                origin = None
            elif 'operand_name' in error_info:
                origin = "{}({})".format(error_info['op'], error_info['operand_name'])
            else:
                origin = "operator={operator} {left} {right}".format(operator=error_info['op'],
                                                                     left=error_info['left_name'],
                                                                     right=error_info['right_name'])
            raise SemanticError("3-5", origin=origin)
        result_operand.set_interval(interval)
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from py_dpm.Exceptions.messages import centralised_messages
|
|
2
|
+
from py_dpm.Utils.operands_mapping import LabelHandler, get_type_from_label
|
|
3
|
+
|
|
4
|
+
"""
|
|
5
|
+
Exceptions management.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class DrrException(Exception):
    """Base class for exceptions in this module."""

    def __init__(self, message, lino=None, colno=None, code=None):
        # Attach the error code to the exception args only when one is given,
        # so uncoded errors keep a plain single-argument message.
        args = (message,) if code is None else (message, code)
        super().__init__(*args)
        # Line/column of the offending token, when known.
        self.lino = lino
        self.colno = colno

    @property
    def pos(self):
        """Return the [line, column] position of the error."""
        return [self.lino, self.colno]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class SyntaxError(DrrException):
    """Syntax-stage error built from the centralised message catalogue.

    NOTE(review): this class shadows the builtin ``SyntaxError``; callers
    importing this module should reference it qualified.
    """

    def __init__(self, code, **kwargs):
        # Resolve the message template for the code and fill in the details.
        super().__init__(centralised_messages[code].format(**kwargs), None, None, code)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def gather_expression(operand):
    """Expand operand labels found in *operand* back into the expressions
    they stand for.

    Labels are substituted newest-first so composite labels are expanded
    before the labels they contain.

    :param operand: expression text possibly containing operand labels
    :return: the expression with all known labels replaced
    """
    operands_labels = LabelHandler().operands_labels

    expression = operand
    # FIX: use the builtin reversed() instead of calling __reversed__()
    # directly; also removed the dead `if expression is None` tail whose
    # branches both returned the same value.
    for key in reversed(operands_labels):
        if key in expression:
            expression = expression.replace(key, operands_labels[key])

    return expression
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class SemanticError(DrrException):
    """Semantic-analysis error whose message substitutes operand labels with
    the original expressions they represent."""

    def __init__(self, code, **kwargs):
        labels = LabelHandler().operands_labels
        text = centralised_messages[code].format(**kwargs)
        # Newest labels first, so composite labels expand before inner ones.
        for label in reversed(labels):
            if label not in text:
                continue
            expanded = gather_expression(label)
            # Non-single operands get a visual delimiter around the generated
            # expression in the final message.
            if get_type_from_label(label) == 'not_single':
                replacement = f"GENERATED:' {expanded} '"
            else:
                replacement = expanded
            text = text.replace(label, replacement)

        super().__init__(text, None, None, code)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class DataTypeError(Exception):
    """Raised when a scalar literal cannot be interpreted as its declared
    data type."""

    def __init__(self, value, dataType):
        # Compose the message first, then delegate to Exception.
        message = "Invalid Scalar value '{}' for data type {}.".format(value, dataType)
        super().__init__(message)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class ScriptingError(DrrException):
    """Error raised while processing scripting expressions."""

    def __init__(self, code, **kwargs):
        # Resolve the catalogue template for the code before delegating.
        super().__init__(centralised_messages[code].format(**kwargs), None, None, code)
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Exceptions.messages.py
|
|
3
|
+
======================
|
|
4
|
+
|
|
5
|
+
Description
|
|
6
|
+
-----------
|
|
7
|
+
All exceptions possibles.
|
|
8
|
+
"""
|
|
9
|
+
centralised_messages = {
|
|
10
|
+
# Syntax errors
|
|
11
|
+
"0-1": "For match operator, provided regex has syntax errors: {message}",
|
|
12
|
+
"0-2": "Cannot specify {argument} more than one time in the same cell reference.",
|
|
13
|
+
"0-3": "Cannot use null literal, you must use the isnull function",
|
|
14
|
+
# Variable not founded
|
|
15
|
+
"1-1": "The following items were not found: {items}.",
|
|
16
|
+
"1-2": "Cell expression {cell_expression} was not found.",
|
|
17
|
+
"1-3": "Variable {variable} was not found.",
|
|
18
|
+
"1-4": "Table {table} was not found.",
|
|
19
|
+
"1-5": "The following open keys were not found: {open_keys}.",
|
|
20
|
+
"1-6": "Table group {table_group} not found.",
|
|
21
|
+
"1-7": "Property signature {property_code} associated with the property constraint cannot be found.",
|
|
22
|
+
"1-8": "The following operations {operations} were not found.",
|
|
23
|
+
"1-9": "Previous operation: {operation_code} was not found",
|
|
24
|
+
"1-10": "Can't be analyzed an expression with table groups. Table reference {table} is defined as table group.", # TODO: only used in semantic not in webservice
|
|
25
|
+
"1-11": "Category with code: {category_code} was not found",
|
|
26
|
+
"1-12": "Subcategory with code: {subcategory_code} was not found",
|
|
27
|
+
"1-13": "No module versions found for table versions: {table_version_ids}.",
|
|
28
|
+
"1-14": "No module versions found for preconditions items: {precondition_items}.",
|
|
29
|
+
"1-15": "Subcategory with id: {subcategory_id} was not found",
|
|
30
|
+
"1-16": "Category with id: {category_id} was not found",
|
|
31
|
+
"1-17": "Grey cells {cell_expression} were found.",
|
|
32
|
+
"1-18": "Found explicit s* on the expression, but no sheets for the cells were found for the "
|
|
33
|
+
"operand(s).",
|
|
34
|
+
"1-19": "Found explicit r* on the expression, but no rows for the cells were found for the "
|
|
35
|
+
"operand(s).",
|
|
36
|
+
"1-20": "Missing explicit {header} on the expression for table {table}",
|
|
37
|
+
# Structures
|
|
38
|
+
"2-1": "If preconditions are involved the result has to be a boolean scalar.",
|
|
39
|
+
"2-2": "This operation is not allowed, because there is no match between DPM components. Operator={op} left={left}, right={right}.",
|
|
40
|
+
"2-3": "Structures are different (or types on key components) for this operator {op} between {structure_1} and {structure_2}. "
|
|
41
|
+
"Please check {origin}.",
|
|
42
|
+
"2-4": "For this operator {op} this component {name} is not present in both structures. "
|
|
43
|
+
"Please check {origin}.",
|
|
44
|
+
"2-5": "For this component {name} types are different for this operator {op} found {type_1} and {type_2}."
|
|
45
|
+
"Please check {origin}.",
|
|
46
|
+
"2-6": "In RecordSet: {name}, standard keys: {keys} with values {values} are duplicated.",
|
|
47
|
+
"2-7": "Component creation for {component_name} was not possible.",
|
|
48
|
+
"2-8": "At op {op}: Key components {dpm_keys} not found on recordset {recordset}.",
|
|
49
|
+
"2-9": "At op {op}: Found different number of headers on left and right hand side.",
|
|
50
|
+
# Data types
|
|
51
|
+
"3-1": "Implicit cast is not available between {type_1} and {type_2}."
|
|
52
|
+
"Please check {origin}.",
|
|
53
|
+
"3-2": "Types are wrong, type_op_1={type_1},type_op_2={type_2}, operator works for {type_op}."
|
|
54
|
+
"Please check {origin}.",
|
|
55
|
+
"3-3": "Types are wrong, type_op_1={type_1}, operator works for {type_op}."
|
|
56
|
+
"Please check {origin}.",
|
|
57
|
+
"3-4": "Interval can't be used for this operand_type={operand_type}",
|
|
58
|
+
"3-5": "Interval can't be used for this operator."
|
|
59
|
+
"Please check {origin}.",
|
|
60
|
+
"3-6": "Invalid default type, default is a {default_type} but it has to be a {expected_type}.",
|
|
61
|
+
# Operators
|
|
62
|
+
# - Aggregate Operators
|
|
63
|
+
"4-4-0-1": "Only a Recordset is allowed in the Aggregation operator {op}.",
|
|
64
|
+
"4-4-0-2": "Grouping components {not_present} are not present in key components",
|
|
65
|
+
"4-4-0-3": "Mixed type can't be used in aggregation. "
|
|
66
|
+
"Please check {origin}.",
|
|
67
|
+
# - Clause Operators
|
|
68
|
+
"4-5-0-1": "On recordset {recordset}: clause operators can't be used with standard key components or fact component.",
|
|
69
|
+
"4-5-0-2": "In {operator} operator only operands of recordset type are supported.", #
|
|
70
|
+
# -- Rename
|
|
71
|
+
"4-5-1-1": "For rename operator component names: {names} already exists on recordset {recordset}.",
|
|
72
|
+
"4-5-1-2": "Duplicated new names after rename operator: {duplicated}.",
|
|
73
|
+
"4-5-1-3": "For rename operator, new names can't be standard key names or indexes names(x,y,z) on recordset: {recordset}",
|
|
74
|
+
"4-5-1-4": "For rename operator, key component {component} not found on recordset: {recordset} because it has already been renamed.",
|
|
75
|
+
# -- Where
|
|
76
|
+
"4-5-2-1": "Invalid clause for where operator, at least one dpm key component of recordset {recordset} must be used.",
|
|
77
|
+
"4-5-2-2": "For where operator, operand {operand} and condition {condition} must have the same structure or condition must be a subset of selection.",
|
|
78
|
+
# - Conditional Operators
|
|
79
|
+
"4-6-0-1": "For Conditional operators DPM components have to have the same records.",
|
|
80
|
+
"4-6-0-2": "For conditional operators, condition {condition} and operand/selection {operand} must have the same structure or condition must be a subset",
|
|
81
|
+
# -- if then else
|
|
82
|
+
"4-6-1-1": "Error for the condition in if then else operator",
|
|
83
|
+
"4-6-1-2": "For if then else operator, if the condition is a scalar and else operand is not provided, then operand can't be a recordset.",
|
|
84
|
+
"4-6-1-3": "For if then else operator, then and else have to be both recordset or both scalars, if the condition is a scalar.",
|
|
85
|
+
# -- nvl
|
|
86
|
+
"4-6-2-1": "Invalid input structures for Nvl operator, right op has to be scalar too.",
|
|
87
|
+
# -- filter
|
|
88
|
+
"4-6-3-1": "Filter operator only supports recordset as selection and condition",
|
|
89
|
+
# - Time Operators
|
|
90
|
+
# -- time_shift (only exists this operator in time operators)
|
|
91
|
+
"4-7-1": "Only a Recordset or a scalar is allowed for {op} operator.",
|
|
92
|
+
"4-7-2": "For time_shift operator, only var component is allowed when operand is a recordset",
|
|
93
|
+
"4-7-3": "For time_shift operator, var component must be specified when operand is a recordset",
|
|
94
|
+
# Generation of validations
|
|
95
|
+
"5-0-1": "This expression doesn't generate any correct validation.",
|
|
96
|
+
"5-0-2": "This expression can't be used for the generation of children.",
|
|
97
|
+
# - Properties constraints
|
|
98
|
+
"5-1-1": "No properties constraints were found.",
|
|
99
|
+
"5-1-2": "In an expression defined on properties constraints only there can be one property constraint.",
|
|
100
|
+
"5-1-3": "Operator '{operator}' not allowed in an expression defined on properties constraints.",
|
|
101
|
+
"5-1-4": "Property with signature {ref} was not found.",
|
|
102
|
+
# - Variants
|
|
103
|
+
"5-2-1": "The supplied expression does not contain any table group.",
|
|
104
|
+
# - Sign validations
|
|
105
|
+
"5-3-1": "Table version id {table_version_id} does not exist or does not have associated cells.",
|
|
106
|
+
# Scripting
|
|
107
|
+
"6-1": "Overwriting a variable is not allowed, trying it with {variable}.",
|
|
108
|
+
"6-2": "References to operations are not allowed in single expressions, trying it with {operation_code}.",
|
|
109
|
+
"6-3": "Preconditions are not allowed in scripting, trying it with {precondition}.",
|
|
110
|
+
"6-4": "Circular reference between operations {op1} and {op2}. Try removing or changing these references.",
|
|
111
|
+
# Other errors
|
|
112
|
+
"7-1": "Found a Property Reference in a regular validation. Please check Operation Source to use Property Constraint.",
|
|
113
|
+
"7-2": "Found a Variable Reference, please check expression"
|
|
114
|
+
}
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from itertools import product
|
|
4
|
+
|
|
5
|
+
import numpy
|
|
6
|
+
import pandas as pd
|
|
7
|
+
|
|
8
|
+
from py_dpm.Exceptions import exceptions
|
|
9
|
+
from py_dpm.models import ModuleVersion, OperationScope, OperationScopeComposition
|
|
10
|
+
from py_dpm.Utils.tokens import VARIABLE_VID, WARNING_SEVERITY
|
|
11
|
+
from py_dpm.db_utils import get_session
|
|
12
|
+
|
|
13
|
+
# Column names used across the module/operation-scope dataframes returned by
# the database layer.
FROM_REFERENCE_DATE = "FromReferenceDate"
TO_REFERENCE_DATE = "ToReferenceDate"
MODULE_VID = "ModuleVID"
TABLE_VID = "TableVID"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _check_if_existing(composition_modules, existing_scopes):
    """Return True when *existing_scopes* already covers exactly the given
    *composition_modules*.

    :param composition_modules: iterable of module version ids forming a candidate scope
    :param existing_scopes: dataframe of persisted scopes with a ModuleVID column
    :return: True when the matched module ids equal the candidate set
    """
    matched = existing_scopes[existing_scopes[MODULE_VID].isin(composition_modules)][MODULE_VID].tolist()
    # Collapsed the redundant `if ...: return True / return False`; the scope
    # exists only when at least one module matched and the sets are equal.
    return bool(matched) and set(composition_modules) == set(matched)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class OperationScopeService:
|
|
27
|
+
"""
|
|
28
|
+
Class to calculate OperationScope and OperationScopeComposition tables for an operation version
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
    def __init__(self, operation_version_id, session=None):
        """
        :param operation_version_id: id of the operation version whose scopes are calculated
        :param session: optional database session; a fresh one is created when omitted
        """
        self.operation_version_id = operation_version_id
        # Fall back to a new DB session when the caller does not supply one.
        self.session = session or get_session()
        # Module version ids involved in the operation.
        self.module_vids = []
        self.current_date = datetime.today().date()

        # Accumulates the OperationScope rows created by this service.
        self.operation_scopes = []
|
|
38
|
+
|
|
39
|
+
    def calculate_operation_scope(self, tables_vids: list, precondition_items: list, only_last_release=True):
        """
        Calculate OperationScope and OperationScopeComposition tables for an operation version, taking as input
        a list with the operation table version ids in order to calculate the module versions involved in the operation
        :param tables_vids: List with table version ids
        :param precondition_items: List with precondition codes
        :param only_last_release: when True, restrict the lookup to the latest release
        :return two list with existing and new scopes
        """

        modules_info_dataframe = self.extract_module_info(
            tables_vids=tables_vids, precondition_items=precondition_items, only_last_release=only_last_release)  # We extract all the releases from the database
        if modules_info_dataframe is None:
            # Nothing to scope: no tables and no preconditions were supplied.
            return [], []

        modules_vids = modules_info_dataframe[MODULE_VID].unique().tolist()
        if len(modules_info_dataframe) == 1:
            # Single operand in a single module: one scope, one composition.
            module_vid = modules_vids[0]
            from_date = modules_info_dataframe['FromReferenceDate'].values[0]
            operation_scope = self.create_operation_scope(from_date)
            self.create_operation_scope_composition(operation_scope=operation_scope, module_vid=module_vid)
        else:
            # Split modules into intra-module (module covers every operand)
            # and cross-module (operands spread across several modules).
            intra_modules = []
            cross_modules = {}
            unique_operands_number = len(tables_vids) + len(precondition_items)

            for module_vid, group_df in modules_info_dataframe.groupby(MODULE_VID):
                vids = group_df[VARIABLE_VID].unique().tolist()
                if len(vids) == unique_operands_number:
                    # This module contains all operands of the operation.
                    intra_modules.append(module_vid)
                else:
                    # Record which modules provide each operand.
                    for table_vid in vids:
                        if table_vid not in cross_modules:
                            cross_modules[table_vid] = []
                        cross_modules[table_vid].append(module_vid)

            if len(intra_modules):
                self.process_repeated(intra_modules, modules_info_dataframe)

            if cross_modules:
                if set(cross_modules.keys())==set(tables_vids):
                    self.process_cross_module(cross_modules=cross_modules, modules_dataframe=modules_info_dataframe)
                else:
                    # add the missing table_vids to cross_modules
                    for table_vid in tables_vids:
                        if table_vid not in cross_modules:
                            cross_modules[table_vid] = modules_info_dataframe[modules_info_dataframe[VARIABLE_VID]==table_vid][MODULE_VID].unique().tolist()
                    self.process_cross_module(cross_modules=cross_modules, modules_dataframe=modules_info_dataframe)

        return self.get_scopes_with_status()
|
|
88
|
+
|
|
89
|
+
    def extract_module_info(self, tables_vids, precondition_items, only_last_release=True):
        """
        Extracts modules information of tables version ids and preconditions from database and
        joins them in a single dataframe
        :param tables_vids: List with table version ids
        :param precondition_items: List with precondition codes
        :param only_last_release: when True, restrict the lookup to the latest release
        :return: dataframe with the joined module info, or None when both inputs are empty
        :raises SemanticError: "1-13" / "1-14" when some table version or
            precondition has no associated module version
        """
        modules_info_lst = []
        modules_info_dataframe = None
        if len(tables_vids):
            tables_modules_info_dataframe = ModuleVersion.get_from_tables_vids(
                session=self.session, tables_vids=tables_vids, only_last_release=only_last_release)
            if tables_modules_info_dataframe.empty:
                missing_table_modules = tables_vids
            else:
                modules_tables = tables_modules_info_dataframe[TABLE_VID].tolist()
                missing_table_modules = set(tables_vids).difference(set(modules_tables))

            # Every requested table version must map to a module version.
            if len(missing_table_modules):
                raise exceptions.SemanticError("1-13", table_version_ids=missing_table_modules)

            # Normalise the key column so tables and preconditions can be concatenated.
            tables_modules_info_dataframe.rename(columns={TABLE_VID: VARIABLE_VID}, inplace=True)
            modules_info_lst.append(tables_modules_info_dataframe)

        if len(precondition_items):
            preconditions_modules_info_dataframe = ModuleVersion.get_precondition_module_versions(session=self.session,
                                                                                                  precondition_items=precondition_items)

            if preconditions_modules_info_dataframe.empty:
                missing_precondition_modules = precondition_items
            else:
                modules_preconditions = preconditions_modules_info_dataframe['Code'].tolist()
                missing_precondition_modules = set(precondition_items).difference(set(modules_preconditions))

            # Every requested precondition must map to a module version.
            if missing_precondition_modules:
                raise exceptions.SemanticError("1-14", precondition_items=missing_precondition_modules)

            preconditions_modules_info_dataframe.rename(columns={'VariableVID': VARIABLE_VID}, inplace=True)
            modules_info_lst.append(preconditions_modules_info_dataframe)

        if len(modules_info_lst):
            modules_info_dataframe = pd.concat(modules_info_lst)
        return modules_info_dataframe
|
|
133
|
+
|
|
134
|
+
def process_repeated(self, modules_vids, modules_info):
    """
    Method to calculate OperationScope and OperationScopeComposition tables for repeated operations
    :param modules_vids: list with module version ids
    :param modules_info: dataframe with module data; must contain 'ModuleVID' and
        'FromReferenceDate' columns
    """
    for vid in modules_vids:
        # Take the first FromReferenceDate recorded for this module version
        module_rows = modules_info[modules_info['ModuleVID'] == vid]
        reference_date = module_rows['FromReferenceDate'].values[0]
        scope = self.create_operation_scope(reference_date)
        self.create_operation_scope_composition(operation_scope=scope, module_vid=vid)
|
|
143
|
+
|
|
144
|
+
def process_cross_module(self, cross_modules, modules_dataframe):
    """
    Method to calculate OperationScope and OperationScopeComposition tables for a cross module operation
    :param cross_modules: dictionary with table version ids as key and its module version ids as values
    :param modules_dataframe: dataframe with modules data
    """
    # Work with real datetimes so the max/min comparisons below behave correctly
    for column in (FROM_REFERENCE_DATE, TO_REFERENCE_DATE):
        modules_dataframe[column] = pd.to_datetime(modules_dataframe[column])

    for combo in product(*cross_modules.values()):
        combo_rows = modules_dataframe[modules_dataframe[MODULE_VID].isin(combo)]
        starts = combo_rows[FROM_REFERENCE_DATE].values
        ends = combo_rows[TO_REFERENCE_DATE].values
        ref_start = starts.max()
        ref_end = ends.min()

        # The combination is valid only when every module's validity window
        # intersects the common interval [ref_start, ref_end]; an open-ended
        # common interval (NaT ref_end) only constrains the lower bound.
        valid = True
        for start, end in zip(starts, ends):
            if end < ref_start or ((not pd.isna(ref_end)) and start > ref_end):
                valid = False

        submission_start = ref_start if valid else None
        scope = self.create_operation_scope(submission_start)
        # De-duplicate: the same module version may appear under several tables
        for module_vid in set(combo):
            self.create_operation_scope_composition(operation_scope=scope, module_vid=module_vid)
|
|
174
|
+
|
|
175
|
+
def create_operation_scope(self, submission_date):
    """
    Method to populate OperationScope table
    :param submission_date: reference date for the scope; accepts numpy.datetime64,
        'YYYY-MM-DD' strings, datetime/date instances, or a null value (None/NaT)
    :return: the OperationScope instance, already added to the session (not flushed)
    """
    from datetime import date

    if pd.isnull(submission_date):
        # Normalize NaT (and None) to a plain None so the ORM column gets a real NULL
        submission_date = None
    else:
        if isinstance(submission_date, numpy.datetime64):
            # numpy renders as 'YYYY-MM-DD[THH:MM:SS]'; keep only the date part
            submission_date = str(submission_date).split('T')[0]
        if isinstance(submission_date, str):
            submission_date = datetime.strptime(submission_date, '%Y-%m-%d').date()
        elif isinstance(submission_date, datetime):
            # Covers pandas.Timestamp too (subclass of datetime)
            submission_date = submission_date.date()
        elif isinstance(submission_date, date):
            # BUG FIX: a plain datetime.date used to fall into the else branch
            # and was silently discarded to None; keep it as-is instead.
            pass
        else:
            # Unsupported type: fall back to an open-ended scope
            submission_date = None
    operation_scope = OperationScope(
        OperationVID=self.operation_version_id,
        IsActive=True,
        Severity=WARNING_SEVERITY,
        FromSubmissionDate=submission_date,
        RowGUID=uuid.uuid4()
    )
    self.session.add(operation_scope)
    return operation_scope
|
|
197
|
+
|
|
198
|
+
def create_operation_scope_composition(self, operation_scope, module_vid):
    """
    Method to populate OperationScopeComposition table
    :param operation_scope: Operation scope data
    :param module_vid: Module version id
    """
    composition = OperationScopeComposition(operation_scope=operation_scope,
                                            ModuleVID=module_vid,
                                            RowGUID=uuid.uuid4())
    self.session.add(composition)
|
|
210
|
+
|
|
211
|
+
def get_scopes_with_status(self):
    """
    Method that checks if operation scope exists in database and classifies it based on whether it exists or not
    :return two list with existing and new scopes
    """
    existing_scopes = []
    new_scopes = []
    # Scopes created during this run, pending in the session and not yet flushed
    operation_scopes = [o for o in self.session.new if isinstance(o, OperationScope)]
    database_scopes = OperationScopeComposition.get_from_operation_version_id(self.session,
                                                                              self.operation_version_id)
    if database_scopes.empty:
        new_scopes = operation_scopes
    else:
        for scope in operation_scopes:
            composition_modules = [scope_comp.ModuleVID for scope_comp in scope.composition]
            # A scope already exists when some stored scope is composed of the same modules
            result = database_scopes.groupby('OperationScopeID').filter(
                lambda x: _check_if_existing(composition_modules, x))

            if not result.empty:
                existing_scopes.append(scope)
            else:
                # if the module is closed and the operation is new, we don't have to create a new
                # scope with the old module because we have the new module
                existing_previous = False
                for vid in composition_modules:
                    # BUG FIX: this guard previously read `if id not in existing_scopes`,
                    # testing the builtin `id` instead of the loop variable `vid`
                    # (vacuously true, so every vid was checked against the database).
                    if vid not in existing_scopes:
                        aux = ModuleVersion.get_module_version_by_vid(session=self.session, vid=vid)
                        if aux.empty:
                            continue
                        if aux['EndReleaseID'][0] is not None:
                            # Module version closed by a later release: no new scope needed
                            existing_previous = True
                            break

                if not existing_previous:
                    new_scopes.append(scope)

    return existing_scopes, new_scopes
|
|
File without changes
|