pydpm_xl 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. py_dpm/AST/ASTConstructor.py +503 -0
  2. py_dpm/AST/ASTObjects.py +827 -0
  3. py_dpm/AST/ASTTemplate.py +101 -0
  4. py_dpm/AST/ASTVisitor.py +13 -0
  5. py_dpm/AST/MLGeneration.py +588 -0
  6. py_dpm/AST/ModuleAnalyzer.py +79 -0
  7. py_dpm/AST/ModuleDependencies.py +203 -0
  8. py_dpm/AST/WhereClauseChecker.py +12 -0
  9. py_dpm/AST/__init__.py +0 -0
  10. py_dpm/AST/check_operands.py +302 -0
  11. py_dpm/DataTypes/ScalarTypes.py +324 -0
  12. py_dpm/DataTypes/TimeClasses.py +370 -0
  13. py_dpm/DataTypes/TypePromotion.py +195 -0
  14. py_dpm/DataTypes/__init__.py +0 -0
  15. py_dpm/Exceptions/__init__.py +0 -0
  16. py_dpm/Exceptions/exceptions.py +84 -0
  17. py_dpm/Exceptions/messages.py +114 -0
  18. py_dpm/OperationScopes/OperationScopeService.py +247 -0
  19. py_dpm/OperationScopes/__init__.py +0 -0
  20. py_dpm/Operators/AggregateOperators.py +138 -0
  21. py_dpm/Operators/BooleanOperators.py +30 -0
  22. py_dpm/Operators/ClauseOperators.py +159 -0
  23. py_dpm/Operators/ComparisonOperators.py +69 -0
  24. py_dpm/Operators/ConditionalOperators.py +362 -0
  25. py_dpm/Operators/NumericOperators.py +101 -0
  26. py_dpm/Operators/Operator.py +388 -0
  27. py_dpm/Operators/StringOperators.py +27 -0
  28. py_dpm/Operators/TimeOperators.py +53 -0
  29. py_dpm/Operators/__init__.py +0 -0
  30. py_dpm/Utils/ValidationsGenerationUtils.py +429 -0
  31. py_dpm/Utils/__init__.py +0 -0
  32. py_dpm/Utils/operands_mapping.py +73 -0
  33. py_dpm/Utils/operator_mapping.py +89 -0
  34. py_dpm/Utils/tokens.py +172 -0
  35. py_dpm/Utils/utils.py +2 -0
  36. py_dpm/ValidationsGeneration/PropertiesConstraintsProcessor.py +190 -0
  37. py_dpm/ValidationsGeneration/Utils.py +364 -0
  38. py_dpm/ValidationsGeneration/VariantsProcessor.py +265 -0
  39. py_dpm/ValidationsGeneration/__init__.py +0 -0
  40. py_dpm/ValidationsGeneration/auxiliary_functions.py +98 -0
  41. py_dpm/__init__.py +61 -0
  42. py_dpm/api/__init__.py +140 -0
  43. py_dpm/api/ast_generator.py +438 -0
  44. py_dpm/api/complete_ast.py +241 -0
  45. py_dpm/api/data_dictionary_validation.py +577 -0
  46. py_dpm/api/migration.py +77 -0
  47. py_dpm/api/semantic.py +224 -0
  48. py_dpm/api/syntax.py +182 -0
  49. py_dpm/client.py +106 -0
  50. py_dpm/data_handlers.py +99 -0
  51. py_dpm/db_utils.py +117 -0
  52. py_dpm/grammar/__init__.py +0 -0
  53. py_dpm/grammar/dist/__init__.py +0 -0
  54. py_dpm/grammar/dist/dpm_xlLexer.interp +428 -0
  55. py_dpm/grammar/dist/dpm_xlLexer.py +804 -0
  56. py_dpm/grammar/dist/dpm_xlLexer.tokens +106 -0
  57. py_dpm/grammar/dist/dpm_xlParser.interp +249 -0
  58. py_dpm/grammar/dist/dpm_xlParser.py +5224 -0
  59. py_dpm/grammar/dist/dpm_xlParser.tokens +106 -0
  60. py_dpm/grammar/dist/dpm_xlParserListener.py +742 -0
  61. py_dpm/grammar/dist/dpm_xlParserVisitor.py +419 -0
  62. py_dpm/grammar/dist/listeners.py +10 -0
  63. py_dpm/grammar/dpm_xlLexer.g4 +435 -0
  64. py_dpm/grammar/dpm_xlParser.g4 +260 -0
  65. py_dpm/migration.py +282 -0
  66. py_dpm/models.py +2139 -0
  67. py_dpm/semantics/DAG/DAGAnalyzer.py +158 -0
  68. py_dpm/semantics/DAG/__init__.py +0 -0
  69. py_dpm/semantics/SemanticAnalyzer.py +320 -0
  70. py_dpm/semantics/Symbols.py +223 -0
  71. py_dpm/semantics/__init__.py +0 -0
  72. py_dpm/utils/__init__.py +0 -0
  73. py_dpm/utils/ast_serialization.py +481 -0
  74. py_dpm/views/data_types.sql +12 -0
  75. py_dpm/views/datapoints.sql +65 -0
  76. py_dpm/views/hierarchy_operand_reference.sql +11 -0
  77. py_dpm/views/hierarchy_preconditions.sql +13 -0
  78. py_dpm/views/hierarchy_variables.sql +26 -0
  79. py_dpm/views/hierarchy_variables_context.sql +14 -0
  80. py_dpm/views/key_components.sql +18 -0
  81. py_dpm/views/module_from_table.sql +11 -0
  82. py_dpm/views/open_keys.sql +13 -0
  83. py_dpm/views/operation_info.sql +27 -0
  84. py_dpm/views/operation_list.sql +18 -0
  85. py_dpm/views/operations_versions_from_module_version.sql +30 -0
  86. py_dpm/views/precondition_info.sql +17 -0
  87. py_dpm/views/report_type_operand_reference_info.sql +18 -0
  88. py_dpm/views/subcategory_info.sql +17 -0
  89. py_dpm/views/table_info.sql +19 -0
  90. pydpm_xl-0.1.10.dist-info/LICENSE +674 -0
  91. pydpm_xl-0.1.10.dist-info/METADATA +50 -0
  92. pydpm_xl-0.1.10.dist-info/RECORD +94 -0
  93. pydpm_xl-0.1.10.dist-info/WHEEL +4 -0
  94. pydpm_xl-0.1.10.dist-info/entry_points.txt +3 -0
py_dpm/AST/ModuleDependencies.py ADDED
@@ -0,0 +1,203 @@
+ from abc import ABC
+
+ from py_dpm.AST.ASTObjects import *
+ from py_dpm.AST.ASTTemplate import ASTTemplate
+ from py_dpm.AST.WhereClauseChecker import WhereClauseChecker
+ from py_dpm.Exceptions import exceptions
+ from py_dpm.models import TableVersion, VariableVersion, ViewDatapoints, ViewModules, ItemCategory
+
+ operand_elements = ['table', 'rows', 'cols', 'sheets', 'default', 'interval']
+
+ def filter_datapoints_df(df, table, table_info: dict, release_id: int = None):
+     """Filters the datapoints dataframe by table code, the requested
+     row/column/sheet codes and, optionally, the release id."""
+     mapping_dictionary = {'rows': 'row_code', 'cols': 'column_code', 'sheets': 'sheet_code'}
+     df = df[df['table_code'] == table]
+     for k, v in table_info.items():
+         if v is not None:
+             if '-' in v[0]:
+                 low_limit, high_limit = v[0].split('-')
+                 df = df[(df[mapping_dictionary[k]].between(low_limit, high_limit))]
+             elif v[0] == '*':
+                 continue
+             else:
+                 df = df[(df[mapping_dictionary[k]].isin(v))]
+
+     if release_id:
+         df = df[df['release_id'] == release_id]
+     return df
+
+ def filter_module_by_table_df(df, table):
+     """
+     Returns a list of modules that contain the table
+     """
+     module_list = df[df['table_code'] == table]['module_code'].tolist()
+     return module_list
+
+ class ModuleDependencies(ASTTemplate, ABC):
+     def __init__(self, session, ast, release_id, date, module_ref, is_scripting=False):
+         self.release_id = release_id
+         self.AST = ast
+         self.tables = {}
+         self.operands = {}
+         self.full_operands = {}
+         self.module_ref = module_ref
+         # self.key_components = {}
+         self.partial_selection = None
+         self.data = None
+         self.items = []
+         self.preconditions = False
+         self.dimension_codes = []
+         # self.open_keys = None
+
+         self.operations = []
+         self.operations_data = None
+         self.is_scripting = is_scripting
+
+         self.session = session
+         self.time_period = 't'  # TODO
+         self.date = date
+         self.modules = {}
+         self.from_time_shift = False
+
+         super().__init__()
+         self.visit(self.AST)
+
+
+     # Start of visiting nodes
+     def visit_WithExpression(self, node: WithExpression):
+         if node.partial_selection.is_table_group:
+             raise exceptions.SemanticError("1-10", table=node.partial_selection.table)
+         self.partial_selection: VarID = node.partial_selection
+         self.visit(node.expression)
+
+     def visit_VarID(self, node: VarID):
+
+         if node.is_table_group:
+             raise exceptions.SemanticError("1-10", table=node.table)
+
+         if self.partial_selection:
+             for attribute in operand_elements:
+                 if getattr(node, attribute, None) is None and not getattr(self.partial_selection, attribute, None) is None:
+                     setattr(node, attribute, getattr(self.partial_selection, attribute))
+
+         if not node.table:
+             raise exceptions.SemanticError("1-4", table=node.table)
+
+         table_info = {
+             'rows': node.rows,
+             'cols': node.cols,
+             'sheets': node.sheets
+         }
+
+         if node.table not in self.tables:
+             self.tables[node.table] = table_info
+             self.operands[node.table] = [self.time_period]
+             # self.operands[node.table] = [node]
+         else:
+             if self.time_period not in self.operands[node.table]:
+                 self.operands[node.table].append(self.time_period)
+                 # self.operands[node.table].append(node)
+
+         # Variables full
+         variables_full = ViewDatapoints.get_filtered_datapoints(self.session,
+                                                                 node.table,
+                                                                 table_info,
+                                                                 release_id=self.release_id)
+         # variables_full = ViewDatapoints.get_table_data(self.session, node.table, table_info['rows'], table_info['cols'], table_info['sheets'],
+         #                                                self.release_id)
+
+         if variables_full.empty:
+             raise exceptions.SemanticError("1-5", open_keys=table_info)
+
+         final_list = variables_full['variable_id'].to_list()
+         # Here change table for module or modules
+         modules = ViewModules().get_all_modules(self.session)  # TODO
+         modules = filter_module_by_table_df(modules, node.table)
+         # modules = ViewModules().get_modules(self.session, [node.table], None)
+         if self.module_ref:
+             if self.module_ref in modules:
+                 full_name = f"{self.module_ref}:{self.time_period}"
+                 if full_name not in self.full_operands:
+                     self.full_operands[full_name] = final_list
+                 else:
+                     for elto in final_list:
+                         if elto not in self.full_operands[full_name]:
+                             self.full_operands[full_name].append(elto)
+             else:
+                 for module in modules:
+                     full_name = f"{module}:{self.time_period}"
+                     if full_name not in self.full_operands:
+                         self.full_operands[full_name] = final_list
+                     else:
+                         for elto in final_list:
+                             if elto not in self.full_operands[full_name]:
+                                 self.full_operands[full_name].append(elto)
+         else:
+             full_name = f"{node.table}:{self.time_period}"
+             self.full_operands[full_name] = final_list
+
+
+     def visit_Dimension(self, node: Dimension):
+         if node.dimension_code not in self.dimension_codes:
+             self.dimension_codes.append(node.dimension_code)
+             if not ItemCategory.get_property_from_code(node.dimension_code, self.session):
+                 raise exceptions.SemanticError("1-5", open_keys=node.dimension_code)
+
+     def visit_VarRef(self, node: VarRef):
+         if not VariableVersion.check_variable_exists(self.session, node.variable, self.release_id):
+             raise exceptions.SemanticError('1-3', variable=node.variable)
+
+     def visit_PreconditionItem(self, node: PreconditionItem):
+
+         if self.is_scripting:
+             raise exceptions.SemanticError('6-3', precondition=node.variable_id)
+
+         if not TableVersion.check_table_exists(self.session, node.variable_id, self.release_id):
+             raise exceptions.SemanticError("1-3", variable=node.variable_id)
+
+         self.preconditions = True
+
+     def visit_Scalar(self, node: Scalar):
+         if node.item and node.scalar_type == 'Item':
+             if node.item not in self.items:
+                 self.items.append(node.item)
+                 if not ItemCategory.get_item_category_id_from_signature(node.item, self.session):
+                     raise exceptions.SemanticError("1-1", items=node.item)
+
+
+     def visit_WhereClauseOp(self, node: WhereClauseOp):
+         self.visit(node.operand)
+         checker = WhereClauseChecker()
+         checker.visit(node.condition)
+         node.key_components = checker.key_components
+         self.visit(node.condition)
+
+     def visit_TimeShiftOp(self, node: TimeShiftOp):
+         self.from_time_shift = True
+         period_indicator = node.period_indicator
+         shift_number = node.shift_number
+         # compute new time period
+         if period_indicator not in ('A', 'Q', 'M', 'W', 'D'):
+             raise ValueError("Period indicator is not valid")
+         if "-" in shift_number:
+             new_time_period = f"t+{period_indicator}{shift_number}"
+         else:
+             new_time_period = f"t-{period_indicator}{shift_number}"
+
+         self.time_period = new_time_period
+         self.visit(node.operand)
+         self.from_time_shift = False
+
+     def visit_OperationRef(self, node: OperationRef):
+         if not self.is_scripting:
+             raise exceptions.SemanticError("6-2", operation_code=node.operation_code)
+
+     def visit_PersistentAssignment(self, node: PersistentAssignment):
+         # TODO: visit node.left when there are calculations variables in database
+         self.visit(node.right)
+
+     def visit_TemporaryAssignment(self, node: TemporaryAssignment):
+         temporary_identifier = node.left
+         self.operations.append(temporary_identifier.value)
+         self.visit(node.right)
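The filter_datapoints_df helper above narrows a datapoints dataframe to a single table and the requested row/column/sheet codes, with an optional release filter. Below is a minimal usage sketch against a synthetic dataframe; the column names (table_code, row_code, column_code, sheet_code, release_id) are taken from the function body, the import path follows the file location, and every value in the sample frame is invented purely for illustration.

import pandas as pd

from py_dpm.AST.ModuleDependencies import filter_datapoints_df

# Synthetic datapoints frame with only the columns the helper filters on (values made up).
df = pd.DataFrame({
    'table_code': ['T01.00', 'T01.00', 'T02.00'],
    'row_code': ['010', '020', '010'],
    'column_code': ['010', '010', '020'],
    'sheet_code': [None, None, None],
    'release_id': [3, 3, 3],
})

# Keep rows 010-020 of table T01.00 for release 3. A '-' in the first code is treated
# as a range, '*' keeps every code, and a plain list selects exact matches.
filtered = filter_datapoints_df(df, 'T01.00', {'rows': ['010-020']}, release_id=3)
print(filtered[['table_code', 'row_code', 'column_code']])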
py_dpm/AST/WhereClauseChecker.py ADDED
@@ -0,0 +1,12 @@
+ from py_dpm.AST.ASTObjects import Dimension
+ from py_dpm.AST.ASTTemplate import ASTTemplate
+
+
+ class WhereClauseChecker(ASTTemplate):
+
+     def __init__(self):
+         super().__init__()
+         self.key_components = []
+
+     def visit_Dimension(self, node: Dimension):
+         self.key_components.append(node.dimension_code)
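WhereClauseChecker is deliberately small: it walks an expression and records every Dimension.dimension_code it meets in key_components. Both ModuleDependencies and check_operands use it the same way inside visit_WhereClauseOp; the sketch below simply repeats that pattern as a standalone helper, where condition_ast stands for an already-parsed condition subtree (a placeholder name, not an API of this package).

from py_dpm.AST.WhereClauseChecker import WhereClauseChecker

def collect_key_components(condition_ast):
    # condition_ast: any AST subtree produced by the py_dpm parser (placeholder).
    checker = WhereClauseChecker()
    checker.visit(condition_ast)
    # Dimension codes in visit order; duplicates are not removed.
    return checker.key_components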
py_dpm/AST/__init__.py ADDED
File without changes
py_dpm/AST/check_operands.py ADDED
@@ -0,0 +1,302 @@
+ from abc import ABC
+
+ import pandas as pd
+
+ from py_dpm.AST.ASTObjects import Dimension, OperationRef, PersistentAssignment, PreconditionItem, \
+     Scalar, TemporaryAssignment, VarID, VarRef, WhereClauseOp, WithExpression
+ from py_dpm.AST.ASTTemplate import ASTTemplate
+ from py_dpm.AST.WhereClauseChecker import WhereClauseChecker
+ from py_dpm.DataTypes.ScalarTypes import Integer, Mixed, Number, ScalarFactory
+ from py_dpm.Exceptions import exceptions
+ from py_dpm.Exceptions.exceptions import SemanticError
+ from py_dpm.models import ItemCategory, Operation, VariableVersion, ViewDatapoints, \
+     ViewKeyComponents, ViewOpenKeys
+ from py_dpm.Utils.operands_mapping import generate_new_label, set_operand_label
+ from py_dpm.data_handlers import filter_all_data
+
+ operand_elements = ['table', 'rows', 'cols', 'sheets', 'default', 'interval']
+
+
+ def _create_operand_label(node):
+     label = generate_new_label()
+     node.label = label
+
+
+ def _modify_element_info(new_data, element, table_info):
+     if new_data is None:  # nothing new to merge: keep whatever selection is already stored
+         pass
+     elif table_info[element] == ['*']:
+         pass
+     elif new_data is not None and table_info[element] is None:
+         table_info[element] = new_data
+
+     elif new_data == ['*']:
+         # We have already all data available
+         table_info[element] = new_data
+
+     else:
+         # We get only the elements that are not already in the info and sort them
+         new_list = [x for x in new_data if x not in table_info[element]]
+         new_list += table_info[element]
+         new_list = sorted(new_list)
+         table_info[element] = new_list
+
+
+ def _modify_table(node, table_info):
+     for element in table_info:
+         _modify_element_info(getattr(node, element), element, table_info)
+
+
+ def format_missing_data(node):
+     rows = ', '.join([f"r{x}" for x in node.rows]) if node.rows else None
+     cols = ', '.join([f"c{x}" for x in node.cols]) if node.cols else None
+     sheets = ', '.join([f"s{x}" for x in node.sheets]) if node.sheets else None
+     op_pos = [node.table, rows, cols, sheets]
+     cell_exp = ", ".join(x for x in op_pos if x is not None)
+     raise exceptions.SemanticError("1-2", cell_expression=cell_exp)
+
+
+ class OperandsChecking(ASTTemplate, ABC):
+     def __init__(self, session, expression, ast, release_id, is_scripting=False):
+         self.expression = expression
+         self.release_id = release_id
+         self.AST = ast
+         self.tables = {}
+         self.operands = {}
+         self.key_components = {}
+         self.partial_selection = None
+         self.data = None
+         self.items = []
+         self.preconditions = False
+         self.dimension_codes = []
+         self.open_keys = None
+
+         self.operations = []
+         self.operations_data = None
+         self.is_scripting = is_scripting
+
+         self.session = session
+
+         super().__init__()
+         self.visit(self.AST)
+         self.check_headers()
+         self.check_items()
+         self.check_tables()
+         self.check_dimensions()
+
+         self.check_operations()
+
+     def _check_header_present(self, table, header):
+         if (self.partial_selection is not None and self.partial_selection.table == table and
+                 getattr(self.partial_selection, header) is not None):
+             return
+         for node in self.operands[table]:
+             if getattr(node, header) is None:
+                 if header == 'cols':
+                     header = 'columns'
+                 raise exceptions.SemanticError("1-20", header=header, table=table)
+
+     def check_headers(self):
+         table_codes = list(self.tables.keys())
+         if len(table_codes) == 0:
+             return
+         query = """
+         SELECT DISTINCT tv.Code, tv.StartReleaseID, tv.EndReleaseID, h.Direction, t.HasOpenRows, t.HasOpenColumns, t.HasOpenSheets
+         FROM [Table] AS t
+         INNER JOIN TableVersion tv ON t.TableID = tv.TableID
+         INNER JOIN TableVersionHeader tvh ON tv.TableVID = tvh.TableVID
+         INNER JOIN Header h ON h.HeaderID = tvh.HeaderID
+         """
+         codes = [f"{code!r}" for code in table_codes]
+         query += f"WHERE tv.Code IN ({', '.join(codes)})"
+         query += " AND tv.EndReleaseID is null"
+         df_headers = pd.read_sql(query, self.session.connection())
+         for table in table_codes:
+             table_headers = df_headers[df_headers['Code'] == table]
+             if table_headers.empty:
+                 continue
+             open_rows = table_headers['HasOpenRows'].values[0]
+             open_cols = table_headers['HasOpenColumns'].values[0]
+             open_sheets = table_headers['HasOpenSheets'].values[0]
+             if "Y" in table_headers['Direction'].values and not open_rows:
+                 self._check_header_present(table, 'rows')
+             if "X" in table_headers['Direction'].values and not open_cols:
+                 self._check_header_present(table, 'cols')
+             if "Z" in table_headers['Direction'].values and not open_sheets:
+                 self._check_header_present(table, 'sheets')
+
+
+     def check_items(self):
+         if len(self.items) == 0:
+             return
+         df_items = ItemCategory.get_items(self.session, self.items, self.release_id)
+         if len(df_items.iloc[:, 0].values) < len(set(self.items)):
+             not_found_items = list(set(self.items).difference(set(df_items['Signature'])))
+             raise exceptions.SemanticError("1-1", items=not_found_items)
+
+     def check_dimensions(self):
+         if len(self.dimension_codes) == 0:
+             return
+         self.open_keys = ViewOpenKeys.get_keys(self.session, self.dimension_codes, self.release_id)
+         if len(self.open_keys) < len(self.dimension_codes):
+             not_found_dimensions = list(set(self.dimension_codes).difference(self.open_keys['property_code']))
+             raise exceptions.SemanticError("1-5", open_keys=not_found_dimensions)
+
+     def check_tables(self):
+         for table, value in self.tables.items():
+             # Extract all data and filter to get only necessary data
+             table_info = value
+             df_table = ViewDatapoints.get_table_data(self.session, table, table_info['rows'],
+                                                      table_info['cols'], table_info['sheets'], self.release_id)
+             if df_table.empty:
+                 cell_expression = f'table: {table}'
+                 for k, v in table_info.items():
+                     if v:
+                         cell_expression += f' {k}: {v}'
+                 raise exceptions.SemanticError("1-2", cell_expression=cell_expression)
+
+             # Insert data type on each node by selecting only data required by node
+             for node in self.operands[table]:
+                 node_data = filter_all_data(df_table, table, node.rows, node.cols, node.sheets)
+                 # Checking grey cells (no variable ID in data for that cell)
+                 grey_cells_data = node_data[node_data['variable_id'].isnull()]
+                 if not grey_cells_data.empty:
+                     if len(grey_cells_data) > 10:
+                         list_cells = grey_cells_data["cell_code"].values[:10]
+                     else:
+                         list_cells = grey_cells_data["cell_code"].values
+                     cell_expression = ', '.join(list_cells)
+                     raise exceptions.SemanticError("1-17", cell_expression=cell_expression)
+                 if node_data.empty:
+                     format_missing_data(node)
+                 extract_data_types(node, node_data['data_type'])
+                 if df_table['sheet_code'].isnull().all() and node.sheets is not None and '*' in node.sheets:
+                     raise SemanticError("1-18")
+                 if df_table['row_code'].isnull().all() and node.rows is not None and '*' in node.rows:
+                     raise SemanticError("1-19")
+                 del node_data
+
+             # Adding data to self.data
+             if self.data is None:
+                 self.data = df_table
+             else:
+                 self.data: pd.DataFrame = pd.concat([self.data, df_table], axis=0).reset_index(drop=True)
+
+
+             self.key_components[table] = ViewKeyComponents.get_by_table(self.session, table, self.release_id)
+
+     # Start of visiting nodes
+     def visit_WithExpression(self, node: WithExpression):
+         if node.partial_selection.is_table_group:
+             raise exceptions.SemanticError("1-10", table=node.partial_selection.table)
+         self.partial_selection: VarID = node.partial_selection
+         self.visit(node.expression)
+
+     def visit_VarID(self, node: VarID):
+
+         if node.is_table_group:
+             raise exceptions.SemanticError("1-10", table=node.table)
+
+         if self.partial_selection:
+             for attribute in operand_elements:
+                 if getattr(node, attribute, None) is None and not getattr(self.partial_selection, attribute, None) is None:
+                     setattr(node, attribute, getattr(self.partial_selection, attribute))
+
+         if not node.table:
+             raise exceptions.SemanticError("1-4", table=node.table)
+
+         _create_operand_label(node)
+         set_operand_label(node.label, node)
+
+         table_info = {
+             'rows': node.rows,
+             'cols': node.cols,
+             'sheets': node.sheets
+         }
+
+         if node.table not in self.tables:
+             self.tables[node.table] = table_info
+             self.operands[node.table] = [node]
+         else:
+             self.operands[node.table].append(node)
+             _modify_table(node, self.tables[node.table])
+
+     def visit_Dimension(self, node: Dimension):
+         if node.dimension_code not in self.dimension_codes:
+             self.dimension_codes.append(node.dimension_code)
+
+     def visit_VarRef(self, node: VarRef):
+         if not VariableVersion.check_variable_exists(self.session, node.variable, self.release_id):
+             raise exceptions.SemanticError('1-3', variable=node.variable)
+
+     def visit_PreconditionItem(self, node: PreconditionItem):
+
+         if self.is_scripting:
+             raise exceptions.SemanticError('6-3', precondition=node.variable_code)
+
+         if not VariableVersion.check_variable_exists(self.session, node.variable_code, self.release_id):
+             raise exceptions.SemanticError("1-3", variable=node.variable_code)
+
+         self.preconditions = True
+         _create_operand_label(node)
+         set_operand_label(node.label, node)
+
+     def visit_Scalar(self, node: Scalar):
+         if node.item and node.scalar_type == 'Item':
+             if node.item not in self.items:
+                 self.items.append(node.item)
+
+     def visit_WhereClauseOp(self, node: WhereClauseOp):
+         self.visit(node.operand)
+         checker = WhereClauseChecker()
+         checker.visit(node.condition)
+         node.key_components = checker.key_components
+         self.visit(node.condition)
+
+     def visit_OperationRef(self, node: OperationRef):
+         if not self.is_scripting:
+             raise exceptions.SemanticError("6-2", operation_code=node.operation_code)
+
+     def visit_PersistentAssignment(self, node: PersistentAssignment):
+         # TODO: visit node.left when there are calculations variables in database
+         self.visit(node.right)
+
+     def visit_TemporaryAssignment(self, node: TemporaryAssignment):
+         temporary_identifier = node.left
+         self.operations.append(temporary_identifier.value)
+         self.visit(node.right)
+
+     def check_operations(self):
+         if len(self.operations):
+             df_operations = Operation.get_operations_from_codes(session=self.session, operation_codes=self.operations,
+                                                                 release_id=self.release_id)
+             if len(df_operations.values) < len(self.operations):
+                 not_found_operations = list(set(self.operations).difference(set(df_operations['Code'])))
+                 raise exceptions.SemanticError('1-8', operations=not_found_operations)
+             self.operations_data = df_operations
+
+
+ def extract_data_types(node: VarID, database_types: pd.Series) -> None:
+     """
+     Extract data type of var ids from database information
+     :param node: Var id
+     :param database_types: Series that contains the data types of node elements
+     :return: None
+     """
+     unique_types = database_types.unique()
+     scalar_factory = ScalarFactory()
+     if len(unique_types) == 1:
+         data_type = scalar_factory.database_types_mapping(unique_types[0])
+         if node.interval and isinstance(data_type(), Number):
+             setattr(node, "type", data_type(node.interval))
+         else:
+             setattr(node, "type", data_type())
+     else:
+         data_types = {scalar_factory.database_types_mapping(data_type) for data_type in unique_types}
+         if len(data_types) == 1:
+             data_type = data_types.pop()
+             setattr(node, "type", data_type())
+         elif len(data_types) == 2 and Number in data_types and Integer in data_types:
+             setattr(node, "type", Number())
+         else:
+             setattr(node, "type", Mixed())
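Of the helpers above, _modify_element_info is the one whose behaviour is easiest to miss: it merges the row/column/sheet selection of a newly visited operand into the selection already accumulated for its table, letting a '*' wildcard win outright and otherwise taking the sorted union of the code lists. A small standalone sketch of that merge follows; the helper is private, so importing it here is purely illustrative, and the codes are made up.

from py_dpm.AST.check_operands import _modify_element_info

table_info = {'rows': ['010', '030'], 'cols': None, 'sheets': None}

# Union of row codes, deduplicated and sorted.
_modify_element_info(['020', '030'], 'rows', table_info)
print(table_info['rows'])    # ['010', '020', '030']

# No stored selection and nothing new: cols stays None.
_modify_element_info(None, 'cols', table_info)
print(table_info['cols'])    # None

# A '*' selection replaces whatever was stored before.
_modify_element_info(['*'], 'rows', table_info)
print(table_info['rows'])    # ['*']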