pydpm_xl 0.1.39rc32__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- py_dpm/__init__.py +1 -1
- py_dpm/api/__init__.py +58 -189
- py_dpm/api/dpm/__init__.py +20 -0
- py_dpm/api/{data_dictionary.py → dpm/data_dictionary.py} +903 -984
- py_dpm/api/dpm/explorer.py +236 -0
- py_dpm/api/dpm/hierarchical_queries.py +142 -0
- py_dpm/api/{migration.py → dpm/migration.py} +16 -19
- py_dpm/api/{operation_scopes.py → dpm/operation_scopes.py} +319 -267
- py_dpm/api/dpm_xl/__init__.py +25 -0
- py_dpm/api/{ast_generator.py → dpm_xl/ast_generator.py} +3 -3
- py_dpm/api/{complete_ast.py → dpm_xl/complete_ast.py} +186 -284
- py_dpm/api/dpm_xl/semantic.py +358 -0
- py_dpm/api/{syntax.py → dpm_xl/syntax.py} +6 -5
- py_dpm/api/explorer.py +4 -0
- py_dpm/api/semantic.py +30 -306
- py_dpm/cli/__init__.py +9 -0
- py_dpm/{client.py → cli/main.py} +12 -10
- py_dpm/dpm/__init__.py +11 -0
- py_dpm/{models.py → dpm/models.py} +112 -88
- py_dpm/dpm/queries/base.py +100 -0
- py_dpm/dpm/queries/basic_objects.py +33 -0
- py_dpm/dpm/queries/explorer_queries.py +352 -0
- py_dpm/dpm/queries/filters.py +139 -0
- py_dpm/dpm/queries/glossary.py +45 -0
- py_dpm/dpm/queries/hierarchical_queries.py +838 -0
- py_dpm/dpm/queries/tables.py +133 -0
- py_dpm/dpm/utils.py +356 -0
- py_dpm/dpm_xl/__init__.py +8 -0
- py_dpm/dpm_xl/ast/__init__.py +14 -0
- py_dpm/{AST/ASTConstructor.py → dpm_xl/ast/constructor.py} +6 -6
- py_dpm/{AST/MLGeneration.py → dpm_xl/ast/ml_generation.py} +137 -87
- py_dpm/{AST/ModuleAnalyzer.py → dpm_xl/ast/module_analyzer.py} +7 -7
- py_dpm/{AST/ModuleDependencies.py → dpm_xl/ast/module_dependencies.py} +56 -41
- py_dpm/{AST/ASTObjects.py → dpm_xl/ast/nodes.py} +1 -1
- py_dpm/{AST/check_operands.py → dpm_xl/ast/operands.py} +16 -13
- py_dpm/{AST/ASTTemplate.py → dpm_xl/ast/template.py} +2 -2
- py_dpm/{AST/WhereClauseChecker.py → dpm_xl/ast/where_clause.py} +2 -2
- py_dpm/dpm_xl/grammar/__init__.py +18 -0
- py_dpm/dpm_xl/operators/__init__.py +19 -0
- py_dpm/{Operators/AggregateOperators.py → dpm_xl/operators/aggregate.py} +7 -7
- py_dpm/{Operators/NumericOperators.py → dpm_xl/operators/arithmetic.py} +6 -6
- py_dpm/{Operators/Operator.py → dpm_xl/operators/base.py} +5 -5
- py_dpm/{Operators/BooleanOperators.py → dpm_xl/operators/boolean.py} +5 -5
- py_dpm/{Operators/ClauseOperators.py → dpm_xl/operators/clause.py} +8 -8
- py_dpm/{Operators/ComparisonOperators.py → dpm_xl/operators/comparison.py} +5 -5
- py_dpm/{Operators/ConditionalOperators.py → dpm_xl/operators/conditional.py} +7 -7
- py_dpm/{Operators/StringOperators.py → dpm_xl/operators/string.py} +5 -5
- py_dpm/{Operators/TimeOperators.py → dpm_xl/operators/time.py} +6 -6
- py_dpm/{semantics/SemanticAnalyzer.py → dpm_xl/semantic_analyzer.py} +168 -68
- py_dpm/{semantics/Symbols.py → dpm_xl/symbols.py} +3 -3
- py_dpm/dpm_xl/types/__init__.py +13 -0
- py_dpm/{DataTypes/TypePromotion.py → dpm_xl/types/promotion.py} +2 -2
- py_dpm/{DataTypes/ScalarTypes.py → dpm_xl/types/scalar.py} +2 -2
- py_dpm/dpm_xl/utils/__init__.py +14 -0
- py_dpm/{data_handlers.py → dpm_xl/utils/data_handlers.py} +2 -2
- py_dpm/{Utils → dpm_xl/utils}/operands_mapping.py +1 -1
- py_dpm/{Utils → dpm_xl/utils}/operator_mapping.py +8 -8
- py_dpm/{OperationScopes/OperationScopeService.py → dpm_xl/utils/scopes_calculator.py} +148 -58
- py_dpm/{Utils/ast_serialization.py → dpm_xl/utils/serialization.py} +3 -4
- py_dpm/dpm_xl/validation/__init__.py +12 -0
- py_dpm/{Utils/ValidationsGenerationUtils.py → dpm_xl/validation/generation_utils.py} +2 -3
- py_dpm/{ValidationsGeneration/PropertiesConstraintsProcessor.py → dpm_xl/validation/property_constraints.py} +56 -21
- py_dpm/{ValidationsGeneration/auxiliary_functions.py → dpm_xl/validation/utils.py} +2 -2
- py_dpm/{ValidationsGeneration/VariantsProcessor.py → dpm_xl/validation/variants.py} +149 -55
- py_dpm/exceptions/__init__.py +23 -0
- py_dpm/{Exceptions → exceptions}/exceptions.py +7 -2
- pydpm_xl-0.2.1.dist-info/METADATA +278 -0
- pydpm_xl-0.2.1.dist-info/RECORD +88 -0
- pydpm_xl-0.2.1.dist-info/entry_points.txt +2 -0
- py_dpm/Exceptions/__init__.py +0 -0
- py_dpm/OperationScopes/__init__.py +0 -0
- py_dpm/Operators/__init__.py +0 -0
- py_dpm/Utils/__init__.py +0 -0
- py_dpm/Utils/utils.py +0 -2
- py_dpm/ValidationsGeneration/Utils.py +0 -364
- py_dpm/ValidationsGeneration/__init__.py +0 -0
- py_dpm/api/data_dictionary_validation.py +0 -614
- py_dpm/db_utils.py +0 -221
- py_dpm/grammar/__init__.py +0 -0
- py_dpm/grammar/dist/__init__.py +0 -0
- py_dpm/grammar/dpm_xlLexer.g4 +0 -437
- py_dpm/grammar/dpm_xlParser.g4 +0 -263
- py_dpm/semantics/DAG/DAGAnalyzer.py +0 -158
- py_dpm/semantics/DAG/__init__.py +0 -0
- py_dpm/semantics/__init__.py +0 -0
- py_dpm/views/data_types.sql +0 -12
- py_dpm/views/datapoints.sql +0 -65
- py_dpm/views/hierarchy_operand_reference.sql +0 -11
- py_dpm/views/hierarchy_preconditions.sql +0 -13
- py_dpm/views/hierarchy_variables.sql +0 -26
- py_dpm/views/hierarchy_variables_context.sql +0 -14
- py_dpm/views/key_components.sql +0 -18
- py_dpm/views/module_from_table.sql +0 -11
- py_dpm/views/open_keys.sql +0 -13
- py_dpm/views/operation_info.sql +0 -27
- py_dpm/views/operation_list.sql +0 -18
- py_dpm/views/operations_versions_from_module_version.sql +0 -30
- py_dpm/views/precondition_info.sql +0 -17
- py_dpm/views/report_type_operand_reference_info.sql +0 -18
- py_dpm/views/subcategory_info.sql +0 -17
- py_dpm/views/table_info.sql +0 -19
- pydpm_xl-0.1.39rc32.dist-info/METADATA +0 -53
- pydpm_xl-0.1.39rc32.dist-info/RECORD +0 -96
- pydpm_xl-0.1.39rc32.dist-info/entry_points.txt +0 -2
- /py_dpm/{AST → cli/commands}/__init__.py +0 -0
- /py_dpm/{migration.py → dpm/migration.py} +0 -0
- /py_dpm/{AST/ASTVisitor.py → dpm_xl/ast/visitor.py} +0 -0
- /py_dpm/{DataTypes → dpm_xl/grammar/generated}/__init__.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.interp +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.tokens +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.interp +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.tokens +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserListener.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserVisitor.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/listeners.py +0 -0
- /py_dpm/{DataTypes/TimeClasses.py → dpm_xl/types/time.py} +0 -0
- /py_dpm/{Utils → dpm_xl/utils}/tokens.py +0 -0
- /py_dpm/{Exceptions → exceptions}/messages.py +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/WHEEL +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/licenses/LICENSE +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/top_level.txt +0 -0
py_dpm/{OperationScopes/OperationScopeService.py → dpm_xl/utils/scopes_calculator.py}

```diff
@@ -5,10 +5,10 @@ from itertools import product
 import numpy
 import pandas as pd
 
-from py_dpm.
-from py_dpm.models import ModuleVersion, OperationScope, OperationScopeComposition
-from py_dpm.
-from py_dpm.
+from py_dpm.exceptions import exceptions
+from py_dpm.dpm.models import ModuleVersion, OperationScope, OperationScopeComposition
+from py_dpm.dpm_xl.utils.tokens import VARIABLE_VID, WARNING_SEVERITY
+from py_dpm.dpm.utils import get_session
 
 FROM_REFERENCE_DATE = "FromReferenceDate"
 TO_REFERENCE_DATE = "ToReferenceDate"
@@ -17,7 +17,9 @@ TABLE_VID = "TableVID"
 
 
 def _check_if_existing(composition_modules, existing_scopes):
-    existing_scopes = existing_scopes[
+    existing_scopes = existing_scopes[
+        existing_scopes[MODULE_VID].isin(composition_modules)
+    ][MODULE_VID].tolist()
     if len(existing_scopes) and set(composition_modules) == set(existing_scopes):
         return True
     return False
```
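The reworked `_check_if_existing` restricts the stored scope compositions to the candidate modules before comparing sets. A runnable toy sketch of the new helper body with a made-up frame; the string value of the `MODULE_VID` constant is an assumption, inferred from the `TABLE_VID = "TableVID"` pattern visible above:

```python
import pandas as pd

MODULE_VID = "ModuleVID"  # assumed value of the module-VID constant


def _check_if_existing(composition_modules, existing_scopes):
    # Keep only rows whose module VID is in the candidate composition, then
    # compare as sets: True iff every composition module appears in the group.
    existing_scopes = existing_scopes[
        existing_scopes[MODULE_VID].isin(composition_modules)
    ][MODULE_VID].tolist()
    if len(existing_scopes) and set(composition_modules) == set(existing_scopes):
        return True
    return False


group = pd.DataFrame({MODULE_VID: [101, 102]})
print(_check_if_existing([101, 102], group))  # True  -- same module set
print(_check_if_existing([101, 103], group))  # False -- 103 is not in the group
```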
```diff
@@ -36,7 +38,13 @@ class OperationScopeService:
 
         self.operation_scopes = []
 
-    def calculate_operation_scope(
+    def calculate_operation_scope(
+        self,
+        tables_vids: list,
+        precondition_items: list,
+        release_id=None,
+        table_codes: list = None,
+    ):
         """
         Calculate OperationScope and OperationScopeComposition tables for an operation version, taking as input
         a list with the operation table version ids in order to calculate the module versions involved in the operation
@@ -51,16 +59,22 @@ class OperationScopeService:
             release_id = ModuleVersion.get_last_release(self.session)
 
         modules_info_dataframe = self.extract_module_info(
-            tables_vids=tables_vids,
+            tables_vids=tables_vids,
+            precondition_items=precondition_items,
+            release_id=release_id,
+            table_codes=table_codes,
+        )  # We extract all the releases from the database
         if modules_info_dataframe is None:
             return [], []
 
         modules_vids = modules_info_dataframe[MODULE_VID].unique().tolist()
         if len(modules_info_dataframe) == 1:
             module_vid = modules_vids[0]
-            from_date = modules_info_dataframe[
+            from_date = modules_info_dataframe["FromReferenceDate"].values[0]
             operation_scope = self.create_operation_scope(from_date)
-            self.create_operation_scope_composition(
+            self.create_operation_scope_composition(
+                operation_scope=operation_scope, module_vid=module_vid
+            )
         else:
             intra_modules = []
             cross_modules = {}
@@ -70,20 +84,32 @@ class OperationScopeService:
                 unique_operands_number = len(table_codes) + len(precondition_items)
 
                 # Categorize modules by lifecycle: starting vs ending in this release
-                starting_modules =
-
+                starting_modules = (
+                    {}
+                )  # Modules that START in this release (replacements)
+                ending_modules = {}  # Modules that END in this release (being replaced)
 
                 for module_vid, group_df in modules_info_dataframe.groupby(MODULE_VID):
-                    table_codes_in_module =
+                    table_codes_in_module = (
+                        group_df["TableCode"].unique().tolist()
+                        if "TableCode" in group_df.columns
+                        else []
+                    )
 
                     # Get module lifecycle info
-                    start_release =
-
+                    start_release = (
+                        group_df["StartReleaseID"].values[0]
+                        if "StartReleaseID" in group_df.columns
+                        else None
+                    )
+                    end_release = group_df["EndReleaseID"].values[0]
 
                     # Determine if this is a "new" module starting in this release
                     # or an "old" module ending in this release
-                    is_starting =
-                    is_ending =
+                    is_starting = start_release == release_id
+                    is_ending = end_release == release_id or end_release == float(
+                        release_id
+                    )
 
                     if len(table_codes_in_module) == unique_operands_number:
                         # Intra-module: include ALL modules active in the release
@@ -91,7 +117,9 @@ class OperationScopeService:
                         intra_modules.append(module_vid)
                     else:
                         # For cross-module, group by table code AND lifecycle stage
-                        target_dict =
+                        target_dict = (
+                            starting_modules if is_starting else ending_modules
+                        )
                         for table_code in table_codes_in_module:
                             if table_code not in target_dict:
                                 target_dict[table_code] = []
@@ -99,9 +127,9 @@ class OperationScopeService:
 
                 # Process cross-module scopes separately for each generation
                 if starting_modules:
-                    cross_modules[
+                    cross_modules["_starting"] = starting_modules
                 if ending_modules:
-                    cross_modules[
+                    cross_modules["_ending"] = ending_modules
             else:
                 # Original logic for table VIDs
                 unique_operands_number = len(tables_vids) + len(precondition_items)
```
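The lifecycle bucketing introduced in `calculate_operation_scope` sorts each module's table codes into "starting" or "ending" dictionaries keyed by table code. A self-contained sketch of that bucketing; the column names follow the hunk above, the data and release IDs are made up:

```python
import pandas as pd

modules_info = pd.DataFrame({
    "ModuleVID": [1, 1, 2],
    "TableCode": ["C_01.00", "C_02.00", "C_01.00"],
    "StartReleaseID": [5, 5, 3],
    "EndReleaseID": [None, None, 5.0],  # pandas coerces this column to float
})
release_id = 5

starting_modules, ending_modules = {}, {}
for module_vid, group_df in modules_info.groupby("ModuleVID"):
    start_release = group_df["StartReleaseID"].values[0]
    end_release = group_df["EndReleaseID"].values[0]
    # A module "starts" when StartReleaseID matches the release; it "ends"
    # when EndReleaseID matches (float() covers the NaN-induced coercion).
    # As in the hunk, the bucketing itself keys only on is_starting.
    is_starting = start_release == release_id
    is_ending = end_release == release_id or end_release == float(release_id)
    target_dict = starting_modules if is_starting else ending_modules
    for table_code in group_df["TableCode"].unique().tolist():
        target_dict.setdefault(table_code, []).append(module_vid)

print(starting_modules)  # {'C_01.00': [1], 'C_02.00': [1]}
print(ending_modules)    # {'C_01.00': [2]}
```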
```diff
@@ -120,27 +148,52 @@ class OperationScopeService:
 
         if cross_modules:
             # When using table_codes with lifecycle grouping
-            if table_codes and (
+            if table_codes and (
+                "_starting" in cross_modules or "_ending" in cross_modules
+            ):
                 # Process each generation separately
-                if
-                    self.process_cross_module(
-
-
+                if "_starting" in cross_modules:
+                    self.process_cross_module(
+                        cross_modules=cross_modules["_starting"],
+                        modules_dataframe=modules_info_dataframe,
+                    )
+                if "_ending" in cross_modules:
+                    self.process_cross_module(
+                        cross_modules=cross_modules["_ending"],
+                        modules_dataframe=modules_info_dataframe,
+                    )
             # Legacy table_codes without lifecycle grouping
             elif table_codes:
-                self.process_cross_module(
-
-
+                self.process_cross_module(
+                    cross_modules=cross_modules,
+                    modules_dataframe=modules_info_dataframe,
+                )
+            elif set(cross_modules.keys()) == set(tables_vids):
+                self.process_cross_module(
+                    cross_modules=cross_modules,
+                    modules_dataframe=modules_info_dataframe,
+                )
             else:
                 # add the missing table_vids to cross_modules
                 for table_vid in tables_vids:
                     if table_vid not in cross_modules:
-                        cross_modules[table_vid] =
-
+                        cross_modules[table_vid] = (
+                            modules_info_dataframe[
+                                modules_info_dataframe[VARIABLE_VID] == table_vid
+                            ][MODULE_VID]
+                            .unique()
+                            .tolist()
+                        )
+                self.process_cross_module(
+                    cross_modules=cross_modules,
+                    modules_dataframe=modules_info_dataframe,
+                )
 
         return self.get_scopes_with_status()
 
-    def extract_module_info(
+    def extract_module_info(
+        self, tables_vids, precondition_items, release_id=None, table_codes=None
+    ):
         """
         Extracts modules information of tables version ids and preconditions from database and
         joins them in a single dataframe
@@ -156,14 +209,16 @@ class OperationScopeService:
         # If table_codes are provided, query by codes to get ALL versions
         if table_codes and len(table_codes):
             tables_modules_info_dataframe = ModuleVersion.get_from_table_codes(
-                session=self.session, table_codes=table_codes, release_id=release_id
+                session=self.session, table_codes=table_codes, release_id=release_id
+            )
             if tables_modules_info_dataframe.empty:
                 raise exceptions.SemanticError("1-13", table_codes=table_codes)
             modules_info_lst.append(tables_modules_info_dataframe)
         # Otherwise use the traditional table VID approach
         elif len(tables_vids):
             tables_modules_info_dataframe = ModuleVersion.get_from_tables_vids(
-                session=self.session, tables_vids=tables_vids, release_id=release_id
+                session=self.session, tables_vids=tables_vids, release_id=release_id
+            )
             if tables_modules_info_dataframe.empty:
                 missing_table_modules = tables_vids
             else:
@@ -171,23 +226,35 @@ class OperationScopeService:
                 missing_table_modules = set(tables_vids).difference(set(modules_tables))
 
             if len(missing_table_modules):
-                raise exceptions.SemanticError(
+                raise exceptions.SemanticError(
+                    "1-13", table_version_ids=missing_table_modules
+                )
 
             modules_info_lst.append(tables_modules_info_dataframe)
 
         if len(precondition_items):
-            preconditions_modules_info_dataframe =
-
-
+            preconditions_modules_info_dataframe = (
+                ModuleVersion.get_precondition_module_versions(
+                    session=self.session,
+                    precondition_items=precondition_items,
+                    release_id=release_id,
+                )
+            )
 
             if preconditions_modules_info_dataframe.empty:
                 missing_precondition_modules = precondition_items
             else:
-                modules_preconditions = preconditions_modules_info_dataframe[
-
+                modules_preconditions = preconditions_modules_info_dataframe[
+                    "Code"
+                ].tolist()
+                missing_precondition_modules = set(precondition_items).difference(
+                    set(modules_preconditions)
+                )
 
             if missing_precondition_modules:
-                raise exceptions.SemanticError(
+                raise exceptions.SemanticError(
+                    "1-14", precondition_items=missing_precondition_modules
+                )
 
             modules_info_lst.append(preconditions_modules_info_dataframe)
 
@@ -201,9 +268,13 @@ class OperationScopeService:
         :param modules_vids: list with module version ids
         """
         for module_vid in modules_vids:
-            from_date = modules_info[modules_info[
+            from_date = modules_info[modules_info["ModuleVID"] == module_vid][
+                "FromReferenceDate"
+            ].values[0]
             operation_scope = self.create_operation_scope(from_date)
-            self.create_operation_scope_composition(
+            self.create_operation_scope_composition(
+                operation_scope=operation_scope, module_vid=module_vid
+            )
 
     def process_cross_module(self, cross_modules, modules_dataframe):
         """
```
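In the VID-based fallback branch of `calculate_operation_scope` above, any table VID missing from `cross_modules` is mapped to every module version containing it before the combinations are processed. A toy sketch; the string values of the token constants are assumptions:

```python
import pandas as pd

MODULE_VID, VARIABLE_VID = "ModuleVID", "VariableVID"  # assumed string values

modules_info_dataframe = pd.DataFrame(
    {VARIABLE_VID: [10, 10, 20], MODULE_VID: [1, 2, 2]}
)
tables_vids = [10, 20]
cross_modules = {20: [2]}  # table 20 already resolved

for table_vid in tables_vids:
    if table_vid not in cross_modules:
        # Map the missing table VID to all module VIDs that contain it.
        cross_modules[table_vid] = (
            modules_info_dataframe[
                modules_info_dataframe[VARIABLE_VID] == table_vid
            ][MODULE_VID]
            .unique()
            .tolist()
        )

print(cross_modules)  # {20: [2], 10: [1, 2]}
```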
```diff
@@ -211,12 +282,18 @@ class OperationScopeService:
         :param cross_modules: dictionary with table version ids as key and its module version ids as values
         :param modules_dataframe: dataframe with modules data
         """
-        modules_dataframe[FROM_REFERENCE_DATE] = pd.to_datetime(
-
+        modules_dataframe[FROM_REFERENCE_DATE] = pd.to_datetime(
+            modules_dataframe[FROM_REFERENCE_DATE], format="mixed", dayfirst=True
+        )
+        modules_dataframe[TO_REFERENCE_DATE] = pd.to_datetime(
+            modules_dataframe[TO_REFERENCE_DATE], format="mixed", dayfirst=True
+        )
 
         values = cross_modules.values()
         for combination in product(*values):
-            combination_info = modules_dataframe[
+            combination_info = modules_dataframe[
+                modules_dataframe[MODULE_VID].isin(combination)
+            ]
             from_dates = combination_info[FROM_REFERENCE_DATE].values
             to_dates = combination_info[TO_REFERENCE_DATE].values
             ref_from_date = from_dates.max()
@@ -224,7 +301,9 @@ class OperationScopeService:
 
             is_valid_combination = True
             for from_date, to_date in zip(from_dates, to_dates):
-                if to_date < ref_from_date or (
+                if to_date < ref_from_date or (
+                    (not pd.isna(ref_to_date)) and from_date > ref_to_date
+                ):
                     is_valid_combination = False
 
             if is_valid_combination:
@@ -234,7 +313,9 @@ class OperationScopeService:
                 operation_scope = self.create_operation_scope(from_submission_date)
                 combination = set(combination)
                 for module in combination:
-                    self.create_operation_scope_composition(
+                    self.create_operation_scope_composition(
+                        operation_scope=operation_scope, module_vid=module
+                    )
 
     def create_operation_scope(self, submission_date):
         """
```
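The validity test in `process_cross_module` accepts a module combination only when all reference-date ranges share a common window. A runnable sketch with made-up dates; that `ref_to_date` is the minimum end date is an assumption (the line is cut off in the source), consistent with the check that follows it:

```python
import pandas as pd

from_dates = pd.to_datetime(["2021-01-01", "2021-06-30"]).values
to_dates = pd.to_datetime(["2021-12-31", pd.NaT]).values

ref_from_date = from_dates.max()  # latest start of the combination
ref_to_date = to_dates.min()      # earliest end (NaT when open-ended)

is_valid_combination = True
for from_date, to_date in zip(from_dates, to_dates):
    # Invalid if a range ends before the common start, or begins after the
    # common end (the end check is skipped when the window is open-ended).
    if to_date < ref_from_date or (
        (not pd.isna(ref_to_date)) and from_date > ref_to_date
    ):
        is_valid_combination = False

print(is_valid_combination)  # True: both ranges cover 2021-06-30 onwards
```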
```diff
@@ -242,9 +323,9 @@ class OperationScopeService:
         """
         if not pd.isnull(submission_date):
             if isinstance(submission_date, numpy.datetime64):
-                submission_date = str(submission_date).split(
+                submission_date = str(submission_date).split("T")[0]
             if isinstance(submission_date, str):
-                submission_date = datetime.strptime(submission_date,
+                submission_date = datetime.strptime(submission_date, "%Y-%m-%d").date()
             elif isinstance(submission_date, datetime):
                 submission_date = submission_date.date()
             else:
@@ -254,7 +335,7 @@ class OperationScopeService:
             isactive=1,  # Use 1 instead of True for PostgreSQL bigint compatibility
             severity=WARNING_SEVERITY,
             fromsubmissiondate=submission_date,
-            rowguid=str(uuid.uuid4())
+            rowguid=str(uuid.uuid4()),
         )
         self.session.add(operation_scope)
         return operation_scope
@@ -268,7 +349,7 @@ class OperationScopeService:
         operation_scope_composition = OperationScopeComposition(
             operation_scope=operation_scope,
             modulevid=module_vid,
-            rowguid=str(uuid.uuid4())
+            rowguid=str(uuid.uuid4()),
         )
         self.session.add(operation_scope_composition)
 
@@ -279,16 +360,23 @@ class OperationScopeService:
         """
         existing_scopes = []
         new_scopes = []
-        operation_scopes = [
-
-
+        operation_scopes = [
+            o for o in self.session.new if isinstance(o, OperationScope)
+        ]
+        database_scopes = OperationScopeComposition.get_from_operation_version_id(
+            self.session, self.operation_version_id
+        )
         if database_scopes.empty:
             new_scopes = operation_scopes
         else:
             for scope in operation_scopes:
-                composition_modules = [
-
-
+                composition_modules = [
+                    scope_comp.modulevid
+                    for scope_comp in scope.operation_scope_compositions
+                ]
+                result = database_scopes.groupby("OperationScopeID").filter(
+                    lambda x: _check_if_existing(composition_modules, x)
+                )
 
                 if not result.empty:
                     existing_scopes.append(scope)
@@ -298,10 +386,12 @@ class OperationScopeService:
                 existing_previous = False
                 for vid in composition_modules:
                     if id not in existing_scopes:
-                        aux = ModuleVersion.get_module_version_by_vid(
+                        aux = ModuleVersion.get_module_version_by_vid(
+                            session=self.session, vid=vid
+                        )
                         if aux.empty:
                             continue
-                        if aux[
+                        if aux["EndReleaseID"][0] is not None:
                             existing_previous = True
                             break
 
```
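The `create_operation_scope` fixes complete the date normalization: `numpy.datetime64` values are stringified and truncated at `"T"`, then ISO strings are parsed to a `date`. A minimal runnable sketch of that sequence:

```python
from datetime import datetime

import numpy

submission_date = numpy.datetime64("2021-06-30T00:00:00")

if isinstance(submission_date, numpy.datetime64):
    # str() yields "2021-06-30T00:00:00"; keep only the date part.
    submission_date = str(submission_date).split("T")[0]
if isinstance(submission_date, str):
    submission_date = datetime.strptime(submission_date, "%Y-%m-%d").date()

print(submission_date)  # 2021-06-30
```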
py_dpm/{Utils/ast_serialization.py → dpm_xl/utils/serialization.py}

```diff
@@ -3,7 +3,7 @@
 AST to JSON serialization utilities for pyDPM
 """
 
-from py_dpm.
+from py_dpm.dpm_xl.ast.visitor import NodeVisitor
 
 class ASTToJSONVisitor(NodeVisitor):
     """Visitor that converts AST nodes to JSON using the existing visitor pattern infrastructure."""
@@ -542,10 +542,9 @@ class ASTToJSONVisitor(NodeVisitor):
 
         return result
 
-
 # Original serialization functions (kept for backward compatibility)
 import json
-from py_dpm.
+from py_dpm.dpm_xl.ast import nodes as ASTObjects
 
 
 def expand_with_expression(node):
@@ -759,4 +758,4 @@ def ast_from_json_string(json_str):
     AST object instance
     """
     data = json.loads(json_str)
-    return deserialize_ast(data)
+    return deserialize_ast(data)
```
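On the serialization side only the import paths change; `ast_from_json_string` remains a `json.loads` followed by `deserialize_ast`. A hedged usage sketch, where the payload string is a placeholder rather than a real serialized AST:

```python
from py_dpm.dpm_xl.utils.serialization import ast_from_json_string

# json_str would be a payload previously produced by the module's serializer
# (e.g. via ASTToJSONVisitor); the literal below is only a placeholder.
json_str = "..."
ast_root = ast_from_json_string(json_str)  # json.loads + deserialize_ast
```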
py_dpm/dpm_xl/validation/__init__.py

```diff
@@ -0,0 +1,12 @@
+"""
+DPM-XL Validation
+
+Syntax and semantic validation for DPM-XL expressions.
+"""
+
+from py_dpm.dpm_xl.validation.variants import *
+from py_dpm.dpm_xl.validation.property_constraints import *
+
+__all__ = [
+    # Re-export will be handled by import *
+]
```
py_dpm/{ValidationsGeneration/auxiliary_functions.py → dpm_xl/validation/utils.py}

```diff
@@ -3,11 +3,10 @@ from itertools import groupby
 
 import pandas as pd
 
-from py_dpm.
+from py_dpm.dpm_xl.utils.tokens import CELL_COMPONENTS, COLUMN, COLUMN_CODE, EXISTENCE_REPORT, \
     HIERARCHY_REPORT, ROW, ROW_CODE, SHEET, SHEET_CODE, \
     SIGN_REPORT
-from py_dpm.ValidationsGeneration.Utils import ExternalDataExistence, ExternalDataHierarchies,
-    ExternalDataSign
+# from py_dpm.ValidationsGeneration.Utils import ExternalDataExistence, ExternalDataHierarchies, ExternalDataSign
 
 
 def from_generate_to_response(validations):
```
py_dpm/{ValidationsGeneration/PropertiesConstraintsProcessor.py → dpm_xl/validation/property_constraints.py}

```diff
@@ -1,12 +1,25 @@
 import pandas as pd
 
-from py_dpm.
-
-
-
-
-
-
+from py_dpm.dpm_xl.ast.nodes import (
+    AggregationOp,
+    BinOp,
+    ComplexNumericOp,
+    CondExpr,
+    FilterOp,
+    GetOp,
+    PropertyReference,
+    RenameOp,
+    Scalar,
+    TimeShiftOp,
+    UnaryOp,
+    VarID,
+    WhereClauseOp,
+)
+from py_dpm.dpm_xl.ast.template import ASTTemplate
+from py_dpm.exceptions import exceptions
+from py_dpm.dpm.models import ItemCategory, ViewDatapoints
+from py_dpm.dpm_xl.validation.generation_utils import ValidationsGenerationUtils
+from py_dpm.dpm_xl.utils.tokens import *
 
 ALLOWED_OPERATORS = [MATCH, IN, EQ, NEQ, GT, GTE, LT, LTE, LENGTH, CONCATENATE]
 
@@ -41,7 +54,9 @@ class PropertiesConstraintsChecker(ASTTemplate):
             pass  # signature should have : to be a property constraint
         signature = node.code
         # look for property in models
-        property_query = ItemCategory.get_property_from_signature(
+        property_query = ItemCategory.get_property_from_signature(
+            signature, self.session
+        )
         if property_query is None:
             raise exceptions.SemanticError("5-1-4", ref=signature)
         self.has_property = True
@@ -55,13 +70,17 @@ class PropertiesConstraintsChecker(ASTTemplate):
         if not self.has_property:
             if getattr(node, "scalar_type", None) == "Item":
                 # go to models and check if item exists and is a property
-                property_query = ItemCategory.get_property_from_signature(
+                property_query = ItemCategory.get_property_from_signature(
+                    signature, self.session
+                )
                 if property_query:
                     self.has_property = True
                 # other assumption could be always first scalar is a property but this is not true
                 # self.has_property = True
             else:
-                other_property_query = ItemCategory.get_property_from_signature(
+                other_property_query = ItemCategory.get_property_from_signature(
+                    signature, self.session
+                )
                 if other_property_query:
                     raise exceptions.SemanticError("5-1-2")
 
@@ -96,12 +115,21 @@ class PropertiesConstraintsProcessor(ASTTemplate):
             raise exceptions.SemanticError("5-1-1")
 
         item_category = ItemCategory.get_property_from_signature(
-
+            signature=self.property_constraint,
+            session=self.session,
+            release_id=self.release_id,
+        )
         if item_category is None:
-            raise exceptions.SemanticError(
-
-
-
+            raise exceptions.SemanticError(
+                "1-7", property_code=self.property_constraint
+            )
+        variables: pd.DataFrame = ViewDatapoints.get_from_property(
+            self.session, item_category.ItemID, self.release_id
+        )
+        for table_code, group_df in variables.groupby(["table_code"]):
+            datapoints = ViewDatapoints.get_table_data(
+                session=self.session, table=str(table_code)
+            )
             self.generate_expressions(table_code, group_df, datapoints)
 
     def generate_expressions(self, table_code, data, datapoints_table):
@@ -111,12 +139,17 @@ class PropertiesConstraintsProcessor(ASTTemplate):
         :param data: dataframe with operand datapoints
         :param datapoints_table: table datapoints
         """
-        groups = ValidationsGenerationUtils.group_cells(
-
+        groups = ValidationsGenerationUtils.group_cells(
+            datapoints_variable=data, datapoints_table=datapoints_table
+        )
        for rows, cols, sheets in groups:
-            operand = ValidationsGenerationUtils.write_cell(
+            operand = ValidationsGenerationUtils.write_cell(
+                table_code, rows, cols, sheets
+            )
             new_expression = self.expression
-            new_expression = new_expression.replace(
+            new_expression = new_expression.replace(
+                f"[{self.property_constraint}]", operand
+            )
             self.new_expressions.append(new_expression)
 
     def create_validation(self, expression, status):
@@ -133,7 +166,7 @@ class PropertiesConstraintsProcessor(ASTTemplate):
         return {
             VALIDATION_CODE: validation_code,
             EXPRESSION: expression,
-            STATUS: status
+            STATUS: status,
         }
 
     def visit_PropertyReference(self, node: PropertyReference):
@@ -148,7 +181,9 @@ class PropertiesConstraintsProcessor(ASTTemplate):
     def visit_Scalar(self, node: Scalar):
         if getattr(node, "scalar_type", None) == "Item":
             signature = node.item
-            property_query = ItemCategory.get_property_from_signature(
+            property_query = ItemCategory.get_property_from_signature(
+                signature, self.session
+            )
             if property_query:
                 if not self.property_constraint:
                     self.property_constraint = signature
```
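At its core, `generate_expressions` is a string substitution: each cell reference built by `write_cell` replaces the bracketed property signature in the source expression. A toy sketch; the expression and the cell-reference syntax are illustrative assumptions, not values taken from the package:

```python
expression = "[prp:ABC] > 0"           # source expression with a property reference
property_constraint = "prp:ABC"        # signature detected by the checker
operand = "{tC_01.00, r0100, c0010}"   # what write_cell might emit (assumed syntax)

new_expression = expression.replace(f"[{property_constraint}]", operand)
print(new_expression)  # {tC_01.00, r0100, c0010} > 0
```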
py_dpm/{ValidationsGeneration/VariantsProcessor.py → dpm_xl/validation/variants.py}

```diff
@@ -1,8 +1,8 @@
 import numpy as np
 import pandas as pd
 
-from py_dpm.
-from py_dpm.
+from py_dpm.dpm_xl.validation.generation_utils import ValidationsGenerationUtils
+from py_dpm.dpm_xl.utils.tokens import *
 
 
 def generate_context_structure(lst: list, group_df: pd.DataFrame):
```