pydpm_xl-0.1.39rc32-py3-none-any.whl → pydpm_xl-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. py_dpm/__init__.py +1 -1
  2. py_dpm/api/__init__.py +58 -189
  3. py_dpm/api/dpm/__init__.py +20 -0
  4. py_dpm/api/{data_dictionary.py → dpm/data_dictionary.py} +903 -984
  5. py_dpm/api/dpm/explorer.py +236 -0
  6. py_dpm/api/dpm/hierarchical_queries.py +142 -0
  7. py_dpm/api/{migration.py → dpm/migration.py} +16 -19
  8. py_dpm/api/{operation_scopes.py → dpm/operation_scopes.py} +319 -267
  9. py_dpm/api/dpm_xl/__init__.py +25 -0
  10. py_dpm/api/{ast_generator.py → dpm_xl/ast_generator.py} +3 -3
  11. py_dpm/api/{complete_ast.py → dpm_xl/complete_ast.py} +191 -167
  12. py_dpm/api/dpm_xl/semantic.py +354 -0
  13. py_dpm/api/{syntax.py → dpm_xl/syntax.py} +6 -5
  14. py_dpm/api/explorer.py +4 -0
  15. py_dpm/api/semantic.py +30 -306
  16. py_dpm/cli/__init__.py +9 -0
  17. py_dpm/{client.py → cli/main.py} +8 -8
  18. py_dpm/dpm/__init__.py +11 -0
  19. py_dpm/{models.py → dpm/models.py} +112 -88
  20. py_dpm/dpm/queries/base.py +100 -0
  21. py_dpm/dpm/queries/basic_objects.py +33 -0
  22. py_dpm/dpm/queries/explorer_queries.py +352 -0
  23. py_dpm/dpm/queries/filters.py +139 -0
  24. py_dpm/dpm/queries/glossary.py +45 -0
  25. py_dpm/dpm/queries/hierarchical_queries.py +838 -0
  26. py_dpm/dpm/queries/tables.py +133 -0
  27. py_dpm/dpm/utils.py +356 -0
  28. py_dpm/dpm_xl/__init__.py +8 -0
  29. py_dpm/dpm_xl/ast/__init__.py +14 -0
  30. py_dpm/{AST/ASTConstructor.py → dpm_xl/ast/constructor.py} +6 -6
  31. py_dpm/{AST/MLGeneration.py → dpm_xl/ast/ml_generation.py} +137 -87
  32. py_dpm/{AST/ModuleAnalyzer.py → dpm_xl/ast/module_analyzer.py} +7 -7
  33. py_dpm/{AST/ModuleDependencies.py → dpm_xl/ast/module_dependencies.py} +56 -41
  34. py_dpm/{AST/ASTObjects.py → dpm_xl/ast/nodes.py} +1 -1
  35. py_dpm/{AST/check_operands.py → dpm_xl/ast/operands.py} +16 -13
  36. py_dpm/{AST/ASTTemplate.py → dpm_xl/ast/template.py} +2 -2
  37. py_dpm/{AST/WhereClauseChecker.py → dpm_xl/ast/where_clause.py} +2 -2
  38. py_dpm/dpm_xl/grammar/__init__.py +18 -0
  39. py_dpm/dpm_xl/operators/__init__.py +19 -0
  40. py_dpm/{Operators/AggregateOperators.py → dpm_xl/operators/aggregate.py} +7 -7
  41. py_dpm/{Operators/NumericOperators.py → dpm_xl/operators/arithmetic.py} +6 -6
  42. py_dpm/{Operators/Operator.py → dpm_xl/operators/base.py} +5 -5
  43. py_dpm/{Operators/BooleanOperators.py → dpm_xl/operators/boolean.py} +5 -5
  44. py_dpm/{Operators/ClauseOperators.py → dpm_xl/operators/clause.py} +8 -8
  45. py_dpm/{Operators/ComparisonOperators.py → dpm_xl/operators/comparison.py} +5 -5
  46. py_dpm/{Operators/ConditionalOperators.py → dpm_xl/operators/conditional.py} +7 -7
  47. py_dpm/{Operators/StringOperators.py → dpm_xl/operators/string.py} +5 -5
  48. py_dpm/{Operators/TimeOperators.py → dpm_xl/operators/time.py} +6 -6
  49. py_dpm/{semantics/SemanticAnalyzer.py → dpm_xl/semantic_analyzer.py} +168 -68
  50. py_dpm/{semantics/Symbols.py → dpm_xl/symbols.py} +3 -3
  51. py_dpm/dpm_xl/types/__init__.py +13 -0
  52. py_dpm/{DataTypes/TypePromotion.py → dpm_xl/types/promotion.py} +2 -2
  53. py_dpm/{DataTypes/ScalarTypes.py → dpm_xl/types/scalar.py} +2 -2
  54. py_dpm/dpm_xl/utils/__init__.py +14 -0
  55. py_dpm/{data_handlers.py → dpm_xl/utils/data_handlers.py} +2 -2
  56. py_dpm/{Utils → dpm_xl/utils}/operands_mapping.py +1 -1
  57. py_dpm/{Utils → dpm_xl/utils}/operator_mapping.py +8 -8
  58. py_dpm/{OperationScopes/OperationScopeService.py → dpm_xl/utils/scopes_calculator.py} +148 -58
  59. py_dpm/{Utils/ast_serialization.py → dpm_xl/utils/serialization.py} +2 -2
  60. py_dpm/dpm_xl/validation/__init__.py +12 -0
  61. py_dpm/{Utils/ValidationsGenerationUtils.py → dpm_xl/validation/generation_utils.py} +2 -3
  62. py_dpm/{ValidationsGeneration/PropertiesConstraintsProcessor.py → dpm_xl/validation/property_constraints.py} +56 -21
  63. py_dpm/{ValidationsGeneration/auxiliary_functions.py → dpm_xl/validation/utils.py} +2 -2
  64. py_dpm/{ValidationsGeneration/VariantsProcessor.py → dpm_xl/validation/variants.py} +149 -55
  65. py_dpm/exceptions/__init__.py +23 -0
  66. py_dpm/{Exceptions → exceptions}/exceptions.py +7 -2
  67. pydpm_xl-0.2.0.dist-info/METADATA +278 -0
  68. pydpm_xl-0.2.0.dist-info/RECORD +88 -0
  69. pydpm_xl-0.2.0.dist-info/entry_points.txt +2 -0
  70. py_dpm/Exceptions/__init__.py +0 -0
  71. py_dpm/OperationScopes/__init__.py +0 -0
  72. py_dpm/Operators/__init__.py +0 -0
  73. py_dpm/Utils/__init__.py +0 -0
  74. py_dpm/Utils/utils.py +0 -2
  75. py_dpm/ValidationsGeneration/Utils.py +0 -364
  76. py_dpm/ValidationsGeneration/__init__.py +0 -0
  77. py_dpm/api/data_dictionary_validation.py +0 -614
  78. py_dpm/db_utils.py +0 -221
  79. py_dpm/grammar/__init__.py +0 -0
  80. py_dpm/grammar/dist/__init__.py +0 -0
  81. py_dpm/grammar/dpm_xlLexer.g4 +0 -437
  82. py_dpm/grammar/dpm_xlParser.g4 +0 -263
  83. py_dpm/semantics/DAG/DAGAnalyzer.py +0 -158
  84. py_dpm/semantics/DAG/__init__.py +0 -0
  85. py_dpm/semantics/__init__.py +0 -0
  86. py_dpm/views/data_types.sql +0 -12
  87. py_dpm/views/datapoints.sql +0 -65
  88. py_dpm/views/hierarchy_operand_reference.sql +0 -11
  89. py_dpm/views/hierarchy_preconditions.sql +0 -13
  90. py_dpm/views/hierarchy_variables.sql +0 -26
  91. py_dpm/views/hierarchy_variables_context.sql +0 -14
  92. py_dpm/views/key_components.sql +0 -18
  93. py_dpm/views/module_from_table.sql +0 -11
  94. py_dpm/views/open_keys.sql +0 -13
  95. py_dpm/views/operation_info.sql +0 -27
  96. py_dpm/views/operation_list.sql +0 -18
  97. py_dpm/views/operations_versions_from_module_version.sql +0 -30
  98. py_dpm/views/precondition_info.sql +0 -17
  99. py_dpm/views/report_type_operand_reference_info.sql +0 -18
  100. py_dpm/views/subcategory_info.sql +0 -17
  101. py_dpm/views/table_info.sql +0 -19
  102. pydpm_xl-0.1.39rc32.dist-info/METADATA +0 -53
  103. pydpm_xl-0.1.39rc32.dist-info/RECORD +0 -96
  104. pydpm_xl-0.1.39rc32.dist-info/entry_points.txt +0 -2
  105. /py_dpm/{AST → cli/commands}/__init__.py +0 -0
  106. /py_dpm/{migration.py → dpm/migration.py} +0 -0
  107. /py_dpm/{AST/ASTVisitor.py → dpm_xl/ast/visitor.py} +0 -0
  108. /py_dpm/{DataTypes → dpm_xl/grammar/generated}/__init__.py +0 -0
  109. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.interp +0 -0
  110. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.py +0 -0
  111. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.tokens +0 -0
  112. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.interp +0 -0
  113. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.py +0 -0
  114. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.tokens +0 -0
  115. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserListener.py +0 -0
  116. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserVisitor.py +0 -0
  117. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/listeners.py +0 -0
  118. /py_dpm/{DataTypes/TimeClasses.py → dpm_xl/types/time.py} +0 -0
  119. /py_dpm/{Utils → dpm_xl/utils}/tokens.py +0 -0
  120. /py_dpm/{Exceptions → exceptions}/messages.py +0 -0
  121. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/WHEEL +0 -0
  122. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/licenses/LICENSE +0 -0
  123. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/top_level.txt +0 -0
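The bulk of this release is a package-layout reorganisation: the flat, CamelCase modules (py_dpm.AST, py_dpm.Operators, py_dpm.DataTypes, py_dpm.Exceptions, py_dpm.Utils, py_dpm.semantics) move under nested, snake_case packages (py_dpm.dpm_xl, py_dpm.dpm, py_dpm.cli, py_dpm.exceptions). Downstream code importing from the old paths will need updating. A minimal sketch of that change, using symbols that appear in the renames listed above; whether 0.2.0 also re-exports these names from higher-level packages is not visible in this diff:

# Before, against 0.1.39rc32 (old flat layout):
#   from py_dpm.AST.ASTObjects import VarID
#   from py_dpm.Operators.NumericOperators import Div
#   from py_dpm.Exceptions.exceptions import SemanticError

# After, against 0.2.0 (new nested layout):
from py_dpm.dpm_xl.ast.nodes import VarID
from py_dpm.dpm_xl.operators.arithmetic import Div
from py_dpm.exceptions.exceptions import SemanticError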
@@ -3,25 +3,69 @@ from abc import ABC

  import pandas as pd

- from py_dpm.AST.ASTObjects import AggregationOp, BinOp, ComplexNumericOp, CondExpr, Constant, \
- Dimension, FilterOp, GetOp, OperationRef, \
- ParExpr, PersistentAssignment, PreconditionItem, PropertyReference, RenameOp, \
- Scalar as ScalarNode, Set, Start, SubOp, TemporaryAssignment, \
- TimeShiftOp, UnaryOp, VarID, VarRef, WhereClauseOp, WithExpression
- from py_dpm.AST.ASTTemplate import ASTTemplate
- from py_dpm.DataTypes.ScalarTypes import Item, Mixed, Null, ScalarFactory
- from py_dpm.DataTypes.TypePromotion import binary_implicit_type_promotion
- from py_dpm.Exceptions import exceptions
- from py_dpm.Exceptions.exceptions import SemanticError
- from py_dpm.Utils.operands_mapping import set_operand_label
- from py_dpm.Utils.operator_mapping import AGGR_OP_MAPPING, BIN_OP_MAPPING, CLAUSE_OP_MAPPING, \
- COMPLEX_OP_MAPPING, CONDITIONAL_OP_MAPPING, \
- TIME_OPERATORS, UNARY_OP_MAPPING
- from py_dpm.Utils.tokens import DPM, FILTER, GET, IF, RENAME, STANDARD, SUB, TIME_SHIFT, WHERE
- from py_dpm.data_handlers import filter_all_data
- from py_dpm.semantics.Symbols import ConstantOperand, FactComponent, KeyComponent, RecordSet, \
- Scalar, \
- ScalarSet, Structure
+ from py_dpm.dpm_xl.ast.nodes import (
+ AggregationOp,
+ BinOp,
+ ComplexNumericOp,
+ CondExpr,
+ Constant,
+ Dimension,
+ FilterOp,
+ GetOp,
+ OperationRef,
+ ParExpr,
+ PersistentAssignment,
+ PreconditionItem,
+ PropertyReference,
+ RenameOp,
+ Scalar as ScalarNode,
+ Set,
+ Start,
+ SubOp,
+ TemporaryAssignment,
+ TimeShiftOp,
+ UnaryOp,
+ VarID,
+ VarRef,
+ WhereClauseOp,
+ WithExpression,
+ )
+ from py_dpm.dpm_xl.ast.template import ASTTemplate
+ from py_dpm.dpm_xl.types.scalar import Item, Mixed, Null, ScalarFactory
+ from py_dpm.dpm_xl.types.promotion import binary_implicit_type_promotion
+ from py_dpm.exceptions import exceptions
+ from py_dpm.exceptions.exceptions import SemanticError
+ from py_dpm.dpm_xl.utils.operands_mapping import set_operand_label
+ from py_dpm.dpm_xl.utils.operator_mapping import (
+ AGGR_OP_MAPPING,
+ BIN_OP_MAPPING,
+ CLAUSE_OP_MAPPING,
+ COMPLEX_OP_MAPPING,
+ CONDITIONAL_OP_MAPPING,
+ TIME_OPERATORS,
+ UNARY_OP_MAPPING,
+ )
+ from py_dpm.dpm_xl.utils.tokens import (
+ DPM,
+ FILTER,
+ GET,
+ IF,
+ RENAME,
+ STANDARD,
+ SUB,
+ TIME_SHIFT,
+ WHERE,
+ )
+ from py_dpm.dpm_xl.utils.data_handlers import filter_all_data
+ from py_dpm.dpm_xl.symbols import (
+ ConstantOperand,
+ FactComponent,
+ KeyComponent,
+ RecordSet,
+ Scalar,
+ ScalarSet,
+ Structure,
+ )


  class InputAnalyzer(ASTTemplate, ABC):
@@ -37,7 +81,9 @@ class InputAnalyzer(ASTTemplate, ABC):

  self.calculations_outputs = {}

- self.global_variables = {'refPeriod': ScalarFactory().database_types_mapping('d')()}
+ self.global_variables = {
+ "refPeriod": ScalarFactory().database_types_mapping("d")()
+ }

  # Start of visiting nodes.

@@ -48,12 +94,18 @@ class InputAnalyzer(ASTTemplate, ABC):
  result_symbol = self.visit(child)

  if self.preconditions:
- if isinstance(result_symbol, Scalar) and not result_symbol.type.strictly_same_class(ScalarFactory().scalar_factory("Boolean")):
+ if isinstance(
+ result_symbol, Scalar
+ ) and not result_symbol.type.strictly_same_class(
+ ScalarFactory().scalar_factory("Boolean")
+ ):
  raise exceptions.SemanticError("2-1")
  elif isinstance(result_symbol, RecordSet):
  if (not result_symbol.has_only_global_components) or (
  not result_symbol.get_fact_component().type.strictly_same_class(
- ScalarFactory().scalar_factory("Boolean"))):
+ ScalarFactory().scalar_factory("Boolean")
+ )
+ ):
  raise exceptions.SemanticError("2-1")

  if len(node.children) == 1:
@@ -92,7 +144,9 @@ class InputAnalyzer(ASTTemplate, ABC):
  condition_symbol = self.visit(node.condition)
  then_symbol = self.visit(node.then_expr)
  else_symbol = None if node.else_expr is None else self.visit(node.else_expr)
- result = CONDITIONAL_OP_MAPPING[IF].validate(condition_symbol, then_symbol, else_symbol)
+ result = CONDITIONAL_OP_MAPPING[IF].validate(
+ condition_symbol, then_symbol, else_symbol
+ )
  return result

  def visit_VarRef(self, node: VarRef):
@@ -109,48 +163,63 @@ class InputAnalyzer(ASTTemplate, ABC):
  try:
  binary_implicit_type_promotion(default_type, type_)
  except SemanticError:
- raise exceptions.SemanticError("3-6", expected_type=type_, default_type=default_type)
-
+ raise exceptions.SemanticError(
+ "3-6", expected_type=type_, default_type=default_type
+ )

  def visit_VarID(self, node: VarID):

- self.__check_default_value(node.default, getattr(node, 'type'))
+ self.__check_default_value(node.default, getattr(node, "type"))

  # filter by table_code
  df = filter_all_data(self.data, node.table, node.rows, node.cols, node.sheets)

  scalar_factory = ScalarFactory()
  interval = getattr(node, "interval", None)
- data_types = df["data_type"].apply(lambda x: scalar_factory.from_database_to_scalar_types(x, interval))
- df['data_type'] = data_types
+ data_types = df["data_type"].apply(
+ lambda x: scalar_factory.from_database_to_scalar_types(x, interval)
+ )
+ df["data_type"] = data_types

  label = getattr(node, "label", None)
  components = []
  if self.key_components and node.table in self.key_components:
  dpm_keys = self.key_components[node.table]
  if len(dpm_keys) > 0:
- for key_name, key_type in zip(dpm_keys['property_code'], dpm_keys['data_type']):
+ for key_name, key_type in zip(
+ dpm_keys["property_code"], dpm_keys["data_type"]
+ ):
  if not key_type:
  type_ = Item()
  else:
  type_ = ScalarFactory().database_types_mapping(key_type)()
- components.append(KeyComponent(name=key_name, type_=type_, subtype=DPM, parent=label))
+ components.append(
+ KeyComponent(
+ name=key_name, type_=type_, subtype=DPM, parent=label
+ )
+ )

  standard_keys = []
- self._check_standard_key(standard_keys, df['row_code'], 'r', label)
- self._check_standard_key(standard_keys, df['column_code'], 'c', label)
- self._check_standard_key(standard_keys, df['sheet_code'], 's', label)
+ self._check_standard_key(standard_keys, df["row_code"], "r", label)
+ self._check_standard_key(standard_keys, df["column_code"], "c", label)
+ self._check_standard_key(standard_keys, df["sheet_code"], "s", label)

  if len(self.global_variables):
  for var_name, var_type in self.global_variables.items():
- var_component = KeyComponent(name=var_name, type_=var_type, subtype=DPM, parent=label, is_global=True)
+ var_component = KeyComponent(
+ name=var_name,
+ type_=var_type,
+ subtype=DPM,
+ parent=label,
+ is_global=True,
+ )
  components.append(var_component)

  components.extend(standard_keys)
  if len(components) == 0:
  set_operand_label(label=label, operand=node)
- return Scalar(type_=getattr(node, 'type'), name=label, origin=label)
- fact_component = FactComponent(type_=getattr(node, 'type'), parent=label)
+ return Scalar(type_=getattr(node, "type"), name=label, origin=label)
+ fact_component = FactComponent(type_=getattr(node, "type"), parent=label)

  components.append(fact_component)
  structure = Structure(components)
@@ -161,16 +230,16 @@ class InputAnalyzer(ASTTemplate, ABC):
  if len(standard_keys) > 0:
  for key in standard_keys:
  standard_key_names.append(key.name)
- if key.name == 'r':
- records.append(df['row_code'])
- elif key.name == 'c':
- records.append(df['column_code'])
- elif key.name == 's':
- records.append(df['sheet_code'])
+ if key.name == "r":
+ records.append(df["row_code"])
+ elif key.name == "c":
+ records.append(df["column_code"])
+ elif key.name == "s":
+ records.append(df["sheet_code"])

  df_records = pd.concat(records, axis=1)
  df_records.columns = standard_key_names
- df_records['data_type'] = df['data_type']
+ df_records["data_type"] = df["data_type"]

  # Check for duplicate keys, but only among non-NULL combinations
  # NULL values can repeat without being considered duplicates
@@ -179,32 +248,47 @@ class InputAnalyzer(ASTTemplate, ABC):
  df_non_null_keys = df_records[~mask_all_null]

  if len(df_non_null_keys) > 0:
- repeated_identifiers = df_non_null_keys[df_non_null_keys[standard_key_names].duplicated(keep=False)]
+ repeated_identifiers = df_non_null_keys[
+ df_non_null_keys[standard_key_names].duplicated(keep=False)
+ ]
  # Further filter: only report duplicates where NO key is NULL (fully specified duplicates)
- mask_has_null = repeated_identifiers[standard_key_names].isnull().any(axis=1)
+ mask_has_null = (
+ repeated_identifiers[standard_key_names].isnull().any(axis=1)
+ )
  fully_specified_duplicates = repeated_identifiers[~mask_has_null]

  if len(fully_specified_duplicates) > 0:
  repeated_values = ""
  for value in fully_specified_duplicates.values:
- repeated_values = ', '.join([repeated_values, str(value)]) if repeated_values else str(value)
- raise exceptions.SemanticError("2-6", name=getattr(node, 'label', None), keys=standard_key_names,
- values=repeated_values)
+ repeated_values = (
+ ", ".join([repeated_values, str(value)])
+ if repeated_values
+ else str(value)
+ )
+ raise exceptions.SemanticError(
+ "2-6",
+ name=getattr(node, "label", None),
+ keys=standard_key_names,
+ values=repeated_values,
+ )

  recordset.records = df_records

-
  return recordset

  @staticmethod
  def _check_standard_key(key_components, elements, name, parent):
  if len(elements) > 1 and len(elements.unique()) > 1:
- key_component = KeyComponent(name=name, type_=Null(), subtype=STANDARD, parent=parent)
+ key_component = KeyComponent(
+ name=name, type_=Null(), subtype=STANDARD, parent=parent
+ )
  key_components.append(key_component)

  def visit_Constant(self, node: Constant):
  constant_type = ScalarFactory().scalar_factory(code=node.type)
- return ConstantOperand(type_=constant_type, name=None, origin=node.value, value=node.value)
+ return ConstantOperand(
+ type_=constant_type, name=None, origin=node.value, value=node.value
+ )

  def visit_AggregationOp(self, node: AggregationOp):
  operand = self.visit(node.operand)
@@ -213,27 +297,31 @@ class InputAnalyzer(ASTTemplate, ABC):

  if operand.has_only_global_components:
  warnings.warn(
- f"Performing an aggregation on recordset: {operand.name} which has only global key components")
+ f"Performing an aggregation on recordset: {operand.name} which has only global key components"
+ )

  grouping_clause = None
  if node.grouping_clause:
  grouping_clause = node.grouping_clause.components

  if isinstance(operand.get_fact_component().type, Mixed):
- origin_expression =AGGR_OP_MAPPING[node.op].generate_origin_expression(operand, grouping_clause)
+ origin_expression = AGGR_OP_MAPPING[node.op].generate_origin_expression(
+ operand, grouping_clause
+ )
  raise exceptions.SemanticError("4-4-0-3", origin=origin_expression)

  result = AGGR_OP_MAPPING[node.op].validate(operand, grouping_clause)
  return result

  def visit_Dimension(self, node: Dimension):
- dimension_data = self.open_keys[self.open_keys['property_code'] == node.dimension_code].reset_index(
- drop=True)
- if dimension_data['data_type'][0] is not None:
- type_code = dimension_data['data_type'][0]
+ dimension_data = self.open_keys[
+ self.open_keys["property_code"] == node.dimension_code
+ ].reset_index(drop=True)
+ if dimension_data["data_type"][0] is not None:
+ type_code = dimension_data["data_type"][0]
  type_ = ScalarFactory().database_types_mapping(code=type_code)()
  else:
- type_ = ScalarFactory().scalar_factory(code='Item')
+ type_ = ScalarFactory().scalar_factory(code="Item")
  return Scalar(type_=type_, name=None, origin=node.dimension_code)

  def visit_Set(self, node: Set):
@@ -241,15 +329,15 @@ class InputAnalyzer(ASTTemplate, ABC):
  if isinstance(node.children[0], Constant):
  types = {child.type for child in node.children}
  if len(types) > 1:
- raise exceptions.SemanticError('11', types=', '.join(types))
+ raise exceptions.SemanticError("11", types=", ".join(types))
  common_type_code = types.pop()
  origin_elements = [str(child.value) for child in node.children]
  else:
- common_type_code = 'Item'
+ common_type_code = "Item"
  origin_elements = ["[" + child.item + "]" for child in node.children]
  common_type = ScalarFactory().scalar_factory(common_type_code)
- origin = ', '.join(origin_elements)
- origin = "{" + origin + '}'
+ origin = ", ".join(origin_elements)
+ origin = "{" + origin + "}"

  return ScalarSet(type_=common_type, name=None, origin=origin)

@@ -276,8 +364,12 @@ class InputAnalyzer(ASTTemplate, ABC):
  operand = self.visit(node.operand)
  if not isinstance(operand, (RecordSet, Scalar, ConstantOperand)):
  raise exceptions.SemanticError("4-7-1", op=TIME_SHIFT)
- result = TIME_OPERATORS[TIME_SHIFT].validate(operand=operand, component_name=node.component,
- period=node.period_indicator, shift_number=node.shift_number)
+ result = TIME_OPERATORS[TIME_SHIFT].validate(
+ operand=operand,
+ component_name=node.component,
+ period=node.period_indicator,
+ shift_number=node.shift_number,
+ )
  return result

  def visit_WhereClauseOp(self, node: WhereClauseOp):
@@ -288,8 +380,12 @@ class InputAnalyzer(ASTTemplate, ABC):
  raise exceptions.SemanticError("4-5-2-1", recordset=operand.name)

  condition = self.visit(node.condition)
- result = CLAUSE_OP_MAPPING[WHERE].validate(operand=operand, key_names=node.key_components, new_names=None,
- condition=condition)
+ result = CLAUSE_OP_MAPPING[WHERE].validate(
+ operand=operand,
+ key_names=node.key_components,
+ new_names=None,
+ condition=condition,
+ )
  return result

  def visit_RenameOp(self, node: RenameOp):
@@ -299,7 +395,9 @@ class InputAnalyzer(ASTTemplate, ABC):
  for rename_node in node.rename_nodes:
  names.append(rename_node.old_name)
  new_names.append(rename_node.new_name)
- result = CLAUSE_OP_MAPPING[RENAME].validate(operand=operand, key_names=names, new_names=new_names)
+ result = CLAUSE_OP_MAPPING[RENAME].validate(
+ operand=operand, key_names=names, new_names=new_names
+ )
  return result

  def visit_GetOp(self, node: GetOp):
@@ -311,7 +409,9 @@ class InputAnalyzer(ASTTemplate, ABC):
  def visit_SubOp(self, node: SubOp):
  operand = self.visit(node.operand)
  value = self.visit(node.value)
- result = CLAUSE_OP_MAPPING[SUB].validate(operand=operand, property_code=node.property_code, value=value)
+ result = CLAUSE_OP_MAPPING[SUB].validate(
+ operand=operand, property_code=node.property_code, value=value
+ )
  return result

  def visit_PreconditionItem(self, node: PreconditionItem) -> Scalar:
@@ -1,8 +1,8 @@
  from typing import List, Union

- from py_dpm.DataTypes.ScalarTypes import ScalarFactory
- from py_dpm.Exceptions.exceptions import SemanticError
- from py_dpm.Utils.tokens import DPM, STANDARD
+ from py_dpm.dpm_xl.types.scalar import ScalarFactory
+ from py_dpm.exceptions.exceptions import SemanticError
+ from py_dpm.dpm_xl.utils.tokens import DPM, STANDARD


  class Operand:
@@ -0,0 +1,13 @@
+ """
+ DPM-XL Type System
+
+ Type definitions, type checking, and type promotion rules for DPM-XL expressions.
+ """
+
+ from py_dpm.dpm_xl.types.scalar import *
+ from py_dpm.dpm_xl.types.time import *
+ from py_dpm.dpm_xl.types.promotion import *
+
+ __all__ = [
+ # Re-export will be handled by import *
+ ]
@@ -1,8 +1,8 @@
  import warnings

- from py_dpm.DataTypes.ScalarTypes import Boolean, Date, Duration, Integer, Item, Mixed, Null, Number, \
+ from py_dpm.dpm_xl.types.scalar import Boolean, Date, Duration, Integer, Item, Mixed, Null, Number, \
  ScalarType, String, Subcategory, TimeInterval, TimePeriod
- from py_dpm.Exceptions.exceptions import SemanticError
+ from py_dpm.exceptions.exceptions import SemanticError

  implicit_type_promotion_dict = {
  String: {String},
@@ -1,8 +1,8 @@
  import numpy as np
  import pandas as pd

- from py_dpm.DataTypes.TimeClasses import timeParser, timePeriodParser
- from py_dpm.Exceptions.exceptions import DataTypeError, SemanticError
+ from py_dpm.dpm_xl.types.time import timeParser, timePeriodParser
+ from py_dpm.exceptions.exceptions import DataTypeError, SemanticError


  class ScalarType:
@@ -0,0 +1,14 @@
+ """
+ DPM-XL Utilities
+
+ Utility functions and mappings for DPM-XL processing.
+ """
+
+ from py_dpm.dpm_xl.utils.operands_mapping import *
+ from py_dpm.dpm_xl.utils.operator_mapping import *
+ from py_dpm.dpm_xl.utils.tokens import *
+ from py_dpm.dpm_xl.utils.serialization import *
+
+ __all__ = [
+ # Re-export will be handled by import *
+ ]
@@ -1,7 +1,7 @@
  import pandas as pd

- from py_dpm.Exceptions.exceptions import SemanticError
- from py_dpm.Utils.tokens import *
+ from py_dpm.exceptions.exceptions import SemanticError
+ from py_dpm.dpm_xl.utils.tokens import *


  def filter_data_by_cell_element(series, cell_elements, element_name, table_code):
@@ -2,7 +2,7 @@ import itertools
  import string
  from typing import Union

- from py_dpm.AST.ASTObjects import PreconditionItem, VarID
+ from py_dpm.dpm_xl.ast.nodes import PreconditionItem, VarID


  class LabelHandler:
@@ -1,13 +1,13 @@
- from py_dpm.Operators.AggregateOperators import Avg, Count, MaxAggr, Median, MinAggr, Sum
- from py_dpm.Operators.BooleanOperators import And, Not, Or, Xor
- from py_dpm.Operators.ClauseOperators import Get, Rename, Sub, Where
- from py_dpm.Operators.ComparisonOperators import Equal, Greater, GreaterEqual, In, IsNull, Less, LessEqual, Match, NotEqual
- from py_dpm.Operators.ConditionalOperators import Filter, IfOperator, Nvl
- from py_dpm.Operators.NumericOperators import AbsoluteValue, BinMinus, BinPlus, Div, Exponential, Logarithm, \
+ from py_dpm.dpm_xl.operators.aggregate import Avg, Count, MaxAggr, Median, MinAggr, Sum
+ from py_dpm.dpm_xl.operators.boolean import And, Not, Or, Xor
+ from py_dpm.dpm_xl.operators.clause import Get, Rename, Sub, Where
+ from py_dpm.dpm_xl.operators.comparison import Equal, Greater, GreaterEqual, In, IsNull, Less, LessEqual, Match, NotEqual
+ from py_dpm.dpm_xl.operators.conditional import Filter, IfOperator, Nvl
+ from py_dpm.dpm_xl.operators.arithmetic import AbsoluteValue, BinMinus, BinPlus, Div, Exponential, Logarithm, \
  Max, Min, Mult, NaturalLogarithm, \
  Power, SquareRoot, UnMinus, UnPlus
- from py_dpm.Operators.StringOperators import Concatenate, Len
- from py_dpm.Operators.TimeOperators import TimeShift
+ from py_dpm.dpm_xl.operators.string import Concatenate, Len
+ from py_dpm.dpm_xl.operators.time import TimeShift
  from .tokens import *

  BIN_OP_MAPPING = {
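The final hunk is cut off at the start of BIN_OP_MAPPING. Throughout the semantic-analyzer hunks above, these mapping dictionaries drive dispatch from a parsed token to an operator class whose validate() performs the semantic check (for example CLAUSE_OP_MAPPING[WHERE].validate(...) and TIME_OPERATORS[TIME_SHIFT].validate(...)). Purely as an illustration of that shape, using imports that do appear in this diff but with dictionary entries that are not shown here:

# Illustrative only: the real dictionary literals are truncated in this diff.
from py_dpm.dpm_xl.operators.clause import Get, Rename, Sub, Where
from py_dpm.dpm_xl.utils.tokens import GET, RENAME, SUB, WHERE

CLAUSE_OP_MAPPING = {
    GET: Get,        # hypothetical entry
    RENAME: Rename,  # hypothetical entry
    SUB: Sub,        # hypothetical entry
    WHERE: Where,    # hypothetical entry
}

# Dispatch pattern as used in InputAnalyzer.visit_WhereClauseOp above:
# result = CLAUSE_OP_MAPPING[WHERE].validate(operand=..., key_names=..., new_names=None, condition=...)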