pydpm_xl 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. py_dpm/AST/ASTConstructor.py +503 -0
  2. py_dpm/AST/ASTObjects.py +827 -0
  3. py_dpm/AST/ASTTemplate.py +101 -0
  4. py_dpm/AST/ASTVisitor.py +13 -0
  5. py_dpm/AST/MLGeneration.py +588 -0
  6. py_dpm/AST/ModuleAnalyzer.py +79 -0
  7. py_dpm/AST/ModuleDependencies.py +203 -0
  8. py_dpm/AST/WhereClauseChecker.py +12 -0
  9. py_dpm/AST/__init__.py +0 -0
  10. py_dpm/AST/check_operands.py +302 -0
  11. py_dpm/DataTypes/ScalarTypes.py +324 -0
  12. py_dpm/DataTypes/TimeClasses.py +370 -0
  13. py_dpm/DataTypes/TypePromotion.py +195 -0
  14. py_dpm/DataTypes/__init__.py +0 -0
  15. py_dpm/Exceptions/__init__.py +0 -0
  16. py_dpm/Exceptions/exceptions.py +84 -0
  17. py_dpm/Exceptions/messages.py +114 -0
  18. py_dpm/OperationScopes/OperationScopeService.py +247 -0
  19. py_dpm/OperationScopes/__init__.py +0 -0
  20. py_dpm/Operators/AggregateOperators.py +138 -0
  21. py_dpm/Operators/BooleanOperators.py +30 -0
  22. py_dpm/Operators/ClauseOperators.py +159 -0
  23. py_dpm/Operators/ComparisonOperators.py +69 -0
  24. py_dpm/Operators/ConditionalOperators.py +362 -0
  25. py_dpm/Operators/NumericOperators.py +101 -0
  26. py_dpm/Operators/Operator.py +388 -0
  27. py_dpm/Operators/StringOperators.py +27 -0
  28. py_dpm/Operators/TimeOperators.py +53 -0
  29. py_dpm/Operators/__init__.py +0 -0
  30. py_dpm/Utils/ValidationsGenerationUtils.py +429 -0
  31. py_dpm/Utils/__init__.py +0 -0
  32. py_dpm/Utils/operands_mapping.py +73 -0
  33. py_dpm/Utils/operator_mapping.py +89 -0
  34. py_dpm/Utils/tokens.py +172 -0
  35. py_dpm/Utils/utils.py +2 -0
  36. py_dpm/ValidationsGeneration/PropertiesConstraintsProcessor.py +190 -0
  37. py_dpm/ValidationsGeneration/Utils.py +364 -0
  38. py_dpm/ValidationsGeneration/VariantsProcessor.py +265 -0
  39. py_dpm/ValidationsGeneration/__init__.py +0 -0
  40. py_dpm/ValidationsGeneration/auxiliary_functions.py +98 -0
  41. py_dpm/__init__.py +61 -0
  42. py_dpm/api/__init__.py +140 -0
  43. py_dpm/api/ast_generator.py +438 -0
  44. py_dpm/api/complete_ast.py +241 -0
  45. py_dpm/api/data_dictionary_validation.py +577 -0
  46. py_dpm/api/migration.py +77 -0
  47. py_dpm/api/semantic.py +224 -0
  48. py_dpm/api/syntax.py +182 -0
  49. py_dpm/client.py +106 -0
  50. py_dpm/data_handlers.py +99 -0
  51. py_dpm/db_utils.py +117 -0
  52. py_dpm/grammar/__init__.py +0 -0
  53. py_dpm/grammar/dist/__init__.py +0 -0
  54. py_dpm/grammar/dist/dpm_xlLexer.interp +428 -0
  55. py_dpm/grammar/dist/dpm_xlLexer.py +804 -0
  56. py_dpm/grammar/dist/dpm_xlLexer.tokens +106 -0
  57. py_dpm/grammar/dist/dpm_xlParser.interp +249 -0
  58. py_dpm/grammar/dist/dpm_xlParser.py +5224 -0
  59. py_dpm/grammar/dist/dpm_xlParser.tokens +106 -0
  60. py_dpm/grammar/dist/dpm_xlParserListener.py +742 -0
  61. py_dpm/grammar/dist/dpm_xlParserVisitor.py +419 -0
  62. py_dpm/grammar/dist/listeners.py +10 -0
  63. py_dpm/grammar/dpm_xlLexer.g4 +435 -0
  64. py_dpm/grammar/dpm_xlParser.g4 +260 -0
  65. py_dpm/migration.py +282 -0
  66. py_dpm/models.py +2139 -0
  67. py_dpm/semantics/DAG/DAGAnalyzer.py +158 -0
  68. py_dpm/semantics/DAG/__init__.py +0 -0
  69. py_dpm/semantics/SemanticAnalyzer.py +320 -0
  70. py_dpm/semantics/Symbols.py +223 -0
  71. py_dpm/semantics/__init__.py +0 -0
  72. py_dpm/utils/__init__.py +0 -0
  73. py_dpm/utils/ast_serialization.py +481 -0
  74. py_dpm/views/data_types.sql +12 -0
  75. py_dpm/views/datapoints.sql +65 -0
  76. py_dpm/views/hierarchy_operand_reference.sql +11 -0
  77. py_dpm/views/hierarchy_preconditions.sql +13 -0
  78. py_dpm/views/hierarchy_variables.sql +26 -0
  79. py_dpm/views/hierarchy_variables_context.sql +14 -0
  80. py_dpm/views/key_components.sql +18 -0
  81. py_dpm/views/module_from_table.sql +11 -0
  82. py_dpm/views/open_keys.sql +13 -0
  83. py_dpm/views/operation_info.sql +27 -0
  84. py_dpm/views/operation_list.sql +18 -0
  85. py_dpm/views/operations_versions_from_module_version.sql +30 -0
  86. py_dpm/views/precondition_info.sql +17 -0
  87. py_dpm/views/report_type_operand_reference_info.sql +18 -0
  88. py_dpm/views/subcategory_info.sql +17 -0
  89. py_dpm/views/table_info.sql +19 -0
  90. pydpm_xl-0.1.10.dist-info/LICENSE +674 -0
  91. pydpm_xl-0.1.10.dist-info/METADATA +50 -0
  92. pydpm_xl-0.1.10.dist-info/RECORD +94 -0
  93. pydpm_xl-0.1.10.dist-info/WHEEL +4 -0
  94. pydpm_xl-0.1.10.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,388 @@
1
+ import warnings
2
+ from typing import Union
3
+
4
+ import pandas as pd
5
+
6
+ from py_dpm.DataTypes.ScalarTypes import Mixed, Number, ScalarFactory
7
+ from py_dpm.DataTypes.TypePromotion import binary_implicit_type_promotion, binary_implicit_type_promotion_with_mixed_types, \
8
+ check_operator, \
9
+ unary_implicit_type_promotion, unary_implicit_type_promotion_with_mixed_types
10
+ from py_dpm.Exceptions.exceptions import SemanticError
11
+ from py_dpm.Utils.operands_mapping import generate_new_label, set_operand_label
12
+ from py_dpm.semantics.Symbols import ConstantOperand, FactComponent, RecordSet, Scalar, ScalarSet, Structure
13
+
14
+
15
class Operator:
    """
    Base class shared by every DPM-XL operator.

    Provides the factory helpers that wrap an operation result into a freshly
    labeled Scalar or RecordSet symbol, plus a sanity check that an operator's
    declared types form a coherent pair.

    Class attributes:
        op: token used to represent the operator in expressions.
        py_op: Python callable used to evaluate constant operands (if needed).
        type_to_check: data type operands must comply with for this operator.
        do_not_check_with_return_type: on comparison operators, only enforce
            that both operands share a data type rather than the return type.
        return_type: data type operands must have, directly or via implicit
            type promotion.
        propagate_attributes: whether attribute components survive into the
            result recordset.
    """
    op = None
    py_op = None
    type_to_check = None
    do_not_check_with_return_type = False
    return_type = None
    propagate_attributes = False

    @staticmethod
    def _create_labeled_scalar(origin, result_type) -> Scalar:
        """Wrap *result_type* into a newly labeled Scalar tied to *origin*."""
        label = generate_new_label()
        interval = getattr(result_type, "interval", None)
        result = Scalar(
            type_=ScalarFactory().scalar_factory(str(result_type), interval),
            name=label,
            origin=origin,
        )
        set_operand_label(label, origin)
        return result

    @classmethod
    def _create_labeled_recordset(cls, origin, rslt_type, rslt_structure, result_dataframe=None) -> RecordSet:
        """Build a newly labeled RecordSet whose fact component has *rslt_type*.

        NOTE: *rslt_structure* is mutated in place (fact component replaced,
        attributes possibly dropped, component parents re-pointed) before it
        is reused for the result.
        """
        label = generate_new_label()
        rslt_structure.components["f"] = FactComponent(type_=rslt_type, parent=label)
        if not cls.propagate_attributes:
            rslt_structure.remove_attributes()
        rslt_structure.replace_components_parent(label)
        result = RecordSet(structure=rslt_structure, name=label, origin=origin)
        result.records = result_dataframe
        set_operand_label(label=label, operand=origin)
        return result

    @classmethod
    def check_operator_well_defined(cls):
        """Validate that return_type and type_to_check are a coherent pair.

        Raises:
            Exception: naming the operator token when check_operator rejects
                the combination.
        """
        # NOTE(review): `__class__.__name__` here differs from the plain
        # `__name__` used in Binary/Unary.validate_types; presumably both end
        # up as a valid ScalarFactory key — confirm against ScalarFactory.
        return_type = None if cls.return_type is None else ScalarFactory().scalar_factory(cls.return_type.__class__.__name__)
        op_check_type = None if cls.type_to_check is None else ScalarFactory().scalar_factory(cls.type_to_check.__class__.__name__)
        if not check_operator(return_type=return_type, op_check_type=op_check_type):
            raise Exception("Review this operator {} ".format(cls.op))
66
+
67
+
68
class Binary(Operator):
    """Base class for binary operators over Scalars, RecordSets and ScalarSets."""
    op = None
    py_op = None
    type_to_check = None
    do_not_check_with_return_type = False
    return_type = None

    @classmethod
    def create_origin_expression(cls, first_operand, second_operand) -> str:
        """Return the infix origin expression "left <op> right" for labeling."""
        first_operand_origin = getattr(first_operand, 'origin')
        second_operand_origin = getattr(second_operand, 'origin')
        origin = f"{first_operand_origin} {cls.op} {second_operand_origin}"
        return origin

    @classmethod
    def create_labeled_scalar(cls, first_operand: Union[Scalar, ConstantOperand], second_operand: Union[Scalar, ConstantOperand],
                              result_type=None):
        """Create the labeled scalar result of applying the operator.

        When both operands are constants the value is computed eagerly with
        py_op and returned as a ConstantOperand; otherwise a labeled Scalar
        of *result_type* is produced.
        """
        origin: str = cls.create_origin_expression(first_operand, second_operand)
        if isinstance(first_operand, ConstantOperand) and isinstance(second_operand, ConstantOperand):
            value = cls.py_op(first_operand.value, second_operand.value)
            return ConstantOperand(type_=ScalarFactory().scalar_factory(str(result_type)), name=value, origin=origin, value=value)

        scalar = cls._create_labeled_scalar(origin=origin, result_type=result_type)
        return scalar

    @classmethod
    def create_labeled_recordset(cls, first_operand, second_operand, rslt_structure, rslt_type, result_dataframe=None):
        """Create the labeled recordset result of applying the operator."""
        origin: str = cls.create_origin_expression(first_operand, second_operand)
        recordset = cls._create_labeled_recordset(origin=origin, rslt_type=rslt_type, rslt_structure=rslt_structure,
                                                  result_dataframe=result_dataframe)
        return recordset

    @classmethod
    def create_labeled_precondition(cls, first_operand, second_operand):
        """Evaluate py_op on both operand values and wrap it as a precondition.

        NOTE(review): `_create_labeled_precondition` is not defined on
        Operator in this file — presumably provided by a subclass or mixin;
        confirm before calling on a bare Binary.
        """
        value = cls.py_op(first_operand.value, second_operand.value)
        origin: str = cls.create_origin_expression(first_operand, second_operand)
        precondition = cls._create_labeled_precondition(origin=origin, value=value)
        return precondition

    @classmethod
    def types_given_structures(cls, left, right):
        """Resolve (left_type, right_type, op_type_to_check) per operand kinds.

        For RecordSets the fact component's type is used; ScalarSets use their
        own type. Raises NotImplementedError for unsupported combinations
        (e.g. ScalarSet on the left).
        """
        op_type_to_check = None if not cls.type_to_check else ScalarFactory().scalar_factory(cls.type_to_check.__name__)

        if isinstance(left, Scalar) and isinstance(right, Scalar):

            return left.type, right.type, op_type_to_check

        elif isinstance(left, RecordSet) and isinstance(right, RecordSet):
            fact_left_component = left.get_fact_component()
            fact_right_component = right.get_fact_component()

            return fact_left_component.type, fact_right_component.type, op_type_to_check

        elif (isinstance(left, RecordSet) and isinstance(right, Scalar)) or (
                isinstance(left, Scalar) and isinstance(right, RecordSet)):
            # Recordset type is always reported first, regardless of side.
            fact_component = left.get_fact_component() if isinstance(left, RecordSet) else right.get_fact_component()
            scalar = right if isinstance(left, RecordSet) else left

            return fact_component.type, scalar.type, op_type_to_check

        elif isinstance(left, RecordSet) and isinstance(right, ScalarSet):
            fact_component = left.get_fact_component()
            return fact_component.type, right.type, op_type_to_check
        elif isinstance(left, Scalar) and isinstance(right, ScalarSet):
            return left.type, right.type, op_type_to_check
        else:
            raise NotImplementedError

    @classmethod
    def validate_types(cls, left, right, result_dataframe):
        """Type-check both operands and compute the promoted result type.

        Here the Structures has been validated. Returns
        (final_type, result_dataframe) where the dataframe, when present, gets
        its per-side data_type columns collapsed into a single 'data_type'
        column holding final_type.
        """
        cls.check_operator_well_defined()
        left_type, right_type, op_type_to_check = cls.types_given_structures(left, right)
        return_type = None if not cls.return_type else ScalarFactory().scalar_factory(cls.return_type.__name__)
        error_info = {
            'left_name': left.name if left.name is not None else left.origin,
            'right_name': right.name if right.name is not None else right.origin,
            'op': cls.op
        }
        interval_allowed = getattr(cls, "interval_allowed", False)
        if isinstance(left_type, Mixed) or isinstance(right_type, Mixed):
            # Mixed types are resolved row-by-row, so the dataframe may change.
            final_type, result_dataframe = binary_implicit_type_promotion_with_mixed_types(
                result_dataframe=result_dataframe, left_type=left_type,
                right_type=right_type, op_type_to_check=op_type_to_check,
                return_type=return_type,
                interval_allowed=interval_allowed,
                error_info=error_info)
        else:
            final_type = binary_implicit_type_promotion(
                left_type, right_type, op_type_to_check, return_type,
                interval_allowed=interval_allowed, error_info=error_info
            )
        if result_dataframe is not None:
            if 'data_type_left' in result_dataframe.columns:
                result_dataframe = result_dataframe.drop(columns=['data_type_left', 'data_type_right'])
            result_dataframe = result_dataframe.assign(data_type=final_type)

        return final_type, result_dataframe

    @classmethod
    def validate_structures(cls, left, right):
        """Check structural compatibility and merge records where applicable.

        Returns (structure, records): for recordset-recordset the common
        structure plus the merged dataframe; for recordset-scalar(set) the
        recordset's own structure/records; (None, None) for scalar-scalar.

        Raises:
            SemanticError: "2-9" on incompatible record counts, "2-2" when the
                merge produces no matching rows.
        """
        if isinstance(left, RecordSet) and isinstance(right, RecordSet):
            result_dataframe = None
            # structure
            origin = cls.create_origin_expression(left, right)
            result_structure = cls._check_structures(left.structure, right.structure, origin)
            if left.records is not None and right.records is not None:
                if len(left.records) != len(right.records) and len(left.records) != 0 and len(right.records) != 0:
                    raise SemanticError("2-9", op=cls.op)
                if len(left.structure.get_standard_components()) == len(right.structure.get_standard_components()):
                    result_dataframe = pd.merge(left=left.records, right=right.records, suffixes=('_left', '_right'),
                                                on=[col for col in left.records.columns if col != 'data_type'])
                else:
                    # Structures differ: merge on the columns of the smaller
                    # (subset) side so every join key exists in both frames.
                    if len(left.structure.get_standard_components()) == len(result_structure.get_standard_components()):
                        result_dataframe = pd.merge(left=left.records, right=right.records,
                                                    suffixes=('_left', '_right'),
                                                    on=[col for col in right.records.columns if col != 'data_type'])
                    else:
                        result_dataframe = pd.merge(left=right.records, right=left.records,
                                                    suffixes=('_right', '_left'),
                                                    on=[col for col in left.records.columns if col != 'data_type'])
                if len(result_dataframe) == 0:
                    raise SemanticError("2-2", op=cls.op, left=left.name, right=right.name)
                if len(result_dataframe) < len(left.records):
                    warnings.warn(
                        f"There is no correspondence between recordset {left.name} and recordset {right.name}.")

            return result_structure, result_dataframe
        elif (isinstance(left, RecordSet) and isinstance(right, Scalar)) or (
                isinstance(left, Scalar) and isinstance(right, RecordSet)):
            if isinstance(left, RecordSet):
                return left.structure, left.records
            return right.structure, right.records
        elif isinstance(left, RecordSet) and isinstance(right, ScalarSet):
            return left.structure, left.records
        else:
            return None, None

    @classmethod
    def check_same_components(cls, left: Structure, right: Structure, origin):
        """Require identical key components and identical DPM component types.

        Raises:
            SemanticError: "2-3" on differing keys, "2-4" on a missing
                component, "2-5" on a component type mismatch.
        """
        if set(left.get_key_components_names()) != set(right.get_key_components_names()):
            raise SemanticError("2-3", op=cls.op, structure_1=left.get_key_components_names(),
                                structure_2=right.get_key_components_names(), origin=origin)

        left_dpm_components = left.get_dpm_components()
        right_dpm_components = right.get_dpm_components()
        for comp_key, comp_value in left_dpm_components.items():
            if comp_key not in right_dpm_components.keys():
                raise SemanticError("2-4", op=cls.op, name=comp_key, origin=origin)
            if comp_value.type.__class__ != right_dpm_components[comp_key].type.__class__:
                # We do not do here the implicit cast, they have to have exactly the same types
                raise SemanticError("2-5", op=cls.op, name=comp_key, type_1=comp_value.type,
                                    type_2=right_dpm_components[comp_key].type, origin=origin)

    @classmethod
    def _check_structures(cls, left: Structure, right: Structure, origin) -> Structure:
        """Pick the common structure for a recordset-recordset operation.

        Used for recordset-recordset. Equal key counts must match exactly
        (left is returned); otherwise one key set must be a subset of the
        other, and the larger structure wins.
        """
        if len(left.get_key_components()) == len(right.get_key_components()):
            cls.check_same_components(left, right, origin)
            return left
        else:
            is_subset, final_structure = cls.check_is_subset(left, right)
            if is_subset:
                return final_structure
            raise SemanticError("2-3", op=cls.op, structure_1=left.get_key_components_names(),
                                structure_2=right.get_key_components_names(), origin=origin)

    @staticmethod
    def check_is_subset(left: Structure, right: Structure):  # -> Tuple[bool, Structure| None]:
        """
        Take two Structures and return a True is one is other's subset and the greatest Structure
        False, None in other case
        """
        left_dpm_components = left.get_dpm_components()
        right_dpm_components = right.get_dpm_components()
        if set(left.get_key_components_names()) <= set(right.get_key_components_names()):  # <= is subset
            for comp_key, comp_value in left_dpm_components.items():
                if comp_value.type.__class__ != right_dpm_components[comp_key].type.__class__:
                    return False, None
            return True, right
        elif set(right.get_key_components_names()) <= set(left.get_key_components_names()):
            for comp_key, comp_value in right_dpm_components.items():
                if comp_value.type.__class__ != left_dpm_components[comp_key].type.__class__:
                    return False, None
            return True, left
        else:
            return False, None

    @classmethod
    def validate(cls, left, right):
        """Full semantic validation entry point for a binary operation.

        Validates structures, then types, and returns either a labeled
        RecordSet (when a common Structure exists) or a labeled Scalar.
        """
        rslt_structure, result_dataframe = cls.validate_structures(left, right)
        rslt_type, result_dataframe = cls.validate_types(left, right, result_dataframe)
        if isinstance(rslt_structure, Structure):
            recordset = cls.create_labeled_recordset(left, right, rslt_structure, rslt_type, result_dataframe)
            return recordset
        labeled_scalar = cls.create_labeled_scalar(left, right, result_type=rslt_type)
        return labeled_scalar
276
+
277
+
278
class Unary(Operator):
    """Base class for unary operators over Scalars and RecordSets."""
    op = None
    py_op = None
    type_to_check = None
    check_specific_type = False
    return_type = None

    @classmethod
    def create_origin_expression(cls, operand, *args) -> str:
        """Return the prefix origin expression "<op>(operand_origin)"."""
        operand_origin = getattr(operand, 'origin')
        origin = f"{cls.op}({operand_origin})"
        return origin

    @classmethod
    def create_labeled_scalar(cls, first_operand, result_type):
        """Create the labeled scalar result of the unary operator.

        Constants are evaluated eagerly with py_op and returned as a
        ConstantOperand; otherwise a labeled Scalar of *result_type* is built.
        """
        origin: str = cls.create_origin_expression(first_operand)
        if isinstance(first_operand, ConstantOperand):
            value = cls.py_op(first_operand.value)
            return ConstantOperand(type_=ScalarFactory().scalar_factory(str(result_type)), name=value, origin=origin, value=value)

        scalar = cls._create_labeled_scalar(origin=origin, result_type=result_type)
        return scalar

    @classmethod
    def create_labeled_recordset(cls, first_operand, rslt_structure, rslt_type, result_dataframe=None):
        """Create the labeled recordset result of the unary operator."""
        origin: str = cls.create_origin_expression(first_operand)
        recordset = cls._create_labeled_recordset(origin=origin, rslt_type=rslt_type, rslt_structure=rslt_structure,
                                                  result_dataframe=result_dataframe)
        return recordset

    @classmethod
    def create_labeled_precondition(cls, operand):
        """Evaluate py_op on the operand value and wrap it as a precondition.

        NOTE(review): `_create_labeled_precondition` is not defined on
        Operator in this file — presumably provided elsewhere; confirm.
        """
        value = cls.py_op(operand.value)
        origin: str = cls.create_origin_expression(operand)

        precondition = cls._create_labeled_precondition(origin=origin, value=value)
        return precondition

    @classmethod
    def validate_types(cls, operand):
        """Type-check the operand and return the labeled result symbol.

        Scalars produce a labeled scalar of the promoted type; RecordSets
        promote their fact component type (Mixed types resolved row-by-row,
        mutating operand.records in place) and produce a labeled recordset.

        Raises:
            Exception: when the operand is neither Scalar nor RecordSet.
        """
        # First we check the operator
        cls.check_operator_well_defined()
        return_type = None if not cls.return_type else ScalarFactory().scalar_factory(cls.return_type.__name__)
        op_type_to_check = None if not cls.type_to_check else ScalarFactory().scalar_factory(cls.type_to_check.__name__)
        error_info = {
            'operand_name': operand.name,
            'op': cls.op
        }

        if isinstance(operand, Scalar):

            final_type = unary_implicit_type_promotion(operand.type, op_type_to_check, return_type=return_type, error_info=error_info)
            labeled_scalar = cls.create_labeled_scalar(operand, result_type=final_type)
            return labeled_scalar

        elif isinstance(operand, RecordSet):
            fact_component_type = operand.structure.components["f"].type

            if isinstance(fact_component_type, Mixed):
                final_type, operand.records = unary_implicit_type_promotion_with_mixed_types(operand.records, op_type_to_check, return_type, error_info=error_info)
            else:
                final_type = unary_implicit_type_promotion(fact_component_type, op_type_to_check, return_type=return_type, error_info=error_info)
                if operand.records is not None:
                    # In-place update: the operand's records carry the new type.
                    operand.records['data_type'] = final_type

            recordset = cls.create_labeled_recordset(operand, rslt_structure=operand.structure, rslt_type=final_type,
                                                     result_dataframe=operand.records)

            return recordset

        else:
            raise Exception("Unary operators only works for Recordset or Scalars")
356
+
357
+
358
class Complex(Binary):
    """N-ary operators (e.g. max/min-style) built on Binary's pairwise checks."""

    @classmethod
    def validate(cls, operands: list):
        """Validate an n-ary operation over *operands*.

        Pairwise-validates every operand against a reference operand (the
        first one, or the first RecordSet when any is present) and returns a
        labeled Scalar or RecordSet of the resulting type.

        Fix: the input list is no longer mutated — the previous implementation
        called operands.pop(0)/operands.remove(...), destructively altering
        the caller's list as a side effect.

        Args:
            operands: list of Scalar/ConstantOperand/RecordSet symbols.
        """
        origin = f"{cls.op}({','.join([str(x.value) if isinstance(x, ConstantOperand) else x.name for x in operands])})"
        recordsets = [operand for operand in operands if isinstance(operand, RecordSet)]
        if len(recordsets) == 0:
            # All-scalar case: promote every remaining operand against the first.
            remaining = list(operands)  # work on a copy, never the caller's list
            ref_operand = remaining.pop(0)
            types = []
            for operand in remaining:
                rslt_type = cls.validate_types(ref_operand, operand, None)
                if rslt_type[0] not in types:
                    types.append(rslt_type[0])
            if len(types) == 1:
                final_type = types[0]
            else:
                # Also covers the single-operand case (empty `types`).
                final_type = Number()  # TODO: review this
            return cls._create_labeled_scalar(origin=origin, result_type=final_type)

        # RecordSet case: fold every other operand into the reference recordset.
        ref_recordset = recordsets[0]
        remaining = list(operands)  # copy before removing the reference
        remaining.remove(ref_recordset)
        # NOTE(review): with a single RecordSet operand and nothing else,
        # final_type stays None — presumably unreachable; confirm with callers.
        final_type = None
        for operand in remaining:
            rslt_structure, rslt_dataframe = cls.validate_structures(ref_recordset, operand)
            final_type, rslt_dataframe = cls.validate_types(ref_recordset, operand, rslt_dataframe)
            ref_recordset.structure = rslt_structure
            ref_recordset.records = rslt_dataframe

        return cls._create_labeled_recordset(origin=origin, rslt_type=final_type, rslt_structure=ref_recordset.structure,
                                             result_dataframe=ref_recordset.records)
@@ -0,0 +1,27 @@
1
+ import operator
2
+
3
+ from py_dpm.DataTypes.ScalarTypes import Integer, String
4
+ from py_dpm.Operators import Operator
5
+ from py_dpm.Utils import tokens
6
+
7
+
8
class Unary(Operator.Unary):
    """Base class for unary string operators: operands must promote to String."""
    op = None
    type_to_check = String
11
+
12
+
13
class Binary(Operator.Binary):
    """Base class for binary string operators: operands must promote to String."""
    op = None
    type_to_check = String
16
+
17
+
18
class Len(Unary):
    """String length operator: maps a String operand to its Integer length."""
    op = tokens.LENGTH
    # Fix: use `len` instead of `operator.length_hint`. For strings both
    # return the exact character count, but `length_hint` silently returns 0
    # for values with no length instead of raising TypeError, masking errors.
    py_op = len
    return_type = Integer
22
+
23
+
24
class Concatenate(Binary):
    """String concatenation operator: left + right -> String."""
    op = tokens.CONCATENATE
    # operator.concat(a, b) is equivalent to a + b for sequences.
    py_op = operator.concat
    return_type = String
@@ -0,0 +1,53 @@
1
+ from typing import Union
2
+
3
+ from py_dpm.DataTypes.ScalarTypes import ScalarFactory, TimeInterval
4
+ from py_dpm.DataTypes.TypePromotion import unary_implicit_type_promotion
5
+ from py_dpm.Exceptions import exceptions
6
+ from py_dpm.Operators.Operator import Operator
7
+ from py_dpm.Utils import tokens
8
+ from py_dpm.semantics.Symbols import ConstantOperand, RecordSet, Scalar
9
+
10
+
11
class TimeShift(Operator):
    """Time-shift operator: shifts a time-typed operand/component by a period."""
    op = tokens.TIME_SHIFT
    type_to_check = TimeInterval
    propagate_attributes = True  # time shift keeps attribute components

    @classmethod
    def validate(cls, operand: Union[RecordSet, Scalar, ConstantOperand], component_name: str, period: str,
                 shift_number: int):
        """Validate a time-shift application and return the labeled result.

        Args:
            operand: RecordSet, Scalar or ConstantOperand to shift.
            component_name: RecordSet component to shift (required for
                RecordSets, forbidden otherwise).
            period: period token used in the origin expression.
            shift_number: number of periods to shift.

        Raises:
            SemanticError: "4-7-3" when a RecordSet lacks a component name,
                "2-8" when the component is unknown, "4-7-2" when a component
                name is given for a non-RecordSet operand.
        """
        type_to_check = ScalarFactory().scalar_factory(cls.type_to_check.__name__)
        error_info = {
            'operand_name': operand.name,
            'op': cls.op
        }

        if isinstance(operand, RecordSet):
            if not component_name:
                raise exceptions.SemanticError("4-7-3")

            if not component_name == tokens.FACT:
                # NOTE(review): components queried on the operand directly,
                # not via operand.structure as elsewhere — confirm RecordSet
                # exposes get_dpm_components()/get_attributes().
                components = {**operand.get_dpm_components(), **operand.get_attributes()}
                if not components or component_name not in components:
                    raise exceptions.SemanticError("2-8", op=cls.op, dpm_keys=component_name,
                                                   recordset=operand.name)

            component = operand.structure.components[component_name]

            # result_type is not used below — the promotion call serves only
            # to validate that the component is time-typed.
            result_type = unary_implicit_type_promotion(
                operand=component.type, op_type_to_check=type_to_check, error_info=error_info)

            origin = f"{cls.op} ( {operand.name}, {period}, {shift_number}, {component_name} )"
            # The result keeps the fact component's original type.
            return cls._create_labeled_recordset(origin, operand.get_fact_component().type, operand.structure, operand.records)

        if component_name:
            raise exceptions.SemanticError("4-7-2")

        # Scalar/constant path: promotion validates the operand's type first,
        # so invalid constants still raise before the early return below.
        final_type = unary_implicit_type_promotion(
            operand=operand.type, op_type_to_check=type_to_check, error_info=error_info)
        if isinstance(operand, ConstantOperand):
            return operand

        origin = f"{cls.op}({operand.name}, {period}, {shift_number})"
        return cls._create_labeled_scalar(origin, result_type=final_type)
File without changes