vtlengine-1.0.3rc3-py3-none-any.whl → vtlengine-1.1rc1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of vtlengine might be problematic.

Files changed (48)
  1. vtlengine/API/_InternalApi.py +64 -58
  2. vtlengine/API/__init__.py +11 -2
  3. vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  4. vtlengine/AST/ASTConstructor.py +5 -4
  5. vtlengine/AST/ASTConstructorModules/Expr.py +47 -48
  6. vtlengine/AST/ASTConstructorModules/ExprComponents.py +45 -23
  7. vtlengine/AST/ASTConstructorModules/Terminals.py +21 -11
  8. vtlengine/AST/ASTEncoders.py +1 -1
  9. vtlengine/AST/DAG/__init__.py +0 -3
  10. vtlengine/AST/Grammar/lexer.py +0 -1
  11. vtlengine/AST/Grammar/parser.py +185 -440
  12. vtlengine/AST/VtlVisitor.py +0 -1
  13. vtlengine/DataTypes/TimeHandling.py +50 -15
  14. vtlengine/DataTypes/__init__.py +79 -7
  15. vtlengine/Exceptions/__init__.py +3 -5
  16. vtlengine/Exceptions/messages.py +65 -105
  17. vtlengine/Interpreter/__init__.py +83 -38
  18. vtlengine/Model/__init__.py +7 -9
  19. vtlengine/Operators/Aggregation.py +13 -7
  20. vtlengine/Operators/Analytic.py +48 -9
  21. vtlengine/Operators/Assignment.py +0 -1
  22. vtlengine/Operators/CastOperator.py +44 -44
  23. vtlengine/Operators/Clause.py +16 -10
  24. vtlengine/Operators/Comparison.py +20 -12
  25. vtlengine/Operators/Conditional.py +30 -13
  26. vtlengine/Operators/General.py +9 -4
  27. vtlengine/Operators/HROperators.py +4 -14
  28. vtlengine/Operators/Join.py +15 -14
  29. vtlengine/Operators/Numeric.py +32 -26
  30. vtlengine/Operators/RoleSetter.py +6 -2
  31. vtlengine/Operators/Set.py +12 -8
  32. vtlengine/Operators/String.py +9 -9
  33. vtlengine/Operators/Time.py +136 -116
  34. vtlengine/Operators/Validation.py +10 -4
  35. vtlengine/Operators/__init__.py +56 -69
  36. vtlengine/Utils/__init__.py +6 -1
  37. vtlengine/__extras_check.py +17 -0
  38. vtlengine/files/output/__init__.py +2 -1
  39. vtlengine/files/output/_time_period_representation.py +2 -1
  40. vtlengine/files/parser/__init__.py +47 -31
  41. vtlengine/files/parser/_rfc_dialect.py +1 -1
  42. vtlengine/files/parser/_time_checking.py +4 -4
  43. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1rc1.dist-info}/METADATA +17 -17
  44. vtlengine-1.1rc1.dist-info/RECORD +59 -0
  45. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1rc1.dist-info}/WHEEL +1 -1
  46. vtlengine/DataTypes/NumericTypesHandling.py +0 -38
  47. vtlengine-1.0.3rc3.dist-info/RECORD +0 -58
  48. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1rc1.dist-info}/LICENSE.md +0 -0
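For reference, the file list above can be reproduced locally by comparing the contents of the two wheels (a wheel is a plain zip archive). A minimal sketch, assuming both files have already been downloaded into the working directory; the filenames and the `pip download` commands in the comments are illustrative, not part of this diff:

import zipfile

# Assumed local filenames; fetch the wheels first, for example with
# `pip download vtlengine==1.0.3rc3 --no-deps` and `pip download vtlengine==1.1rc1 --no-deps`.
OLD_WHEEL = "vtlengine-1.0.3rc3-py3-none-any.whl"
NEW_WHEEL = "vtlengine-1.1rc1-py3-none-any.whl"

def wheel_files(path: str) -> set:
    # A wheel is a zip archive, so its member list is the file inventory.
    with zipfile.ZipFile(path) as whl:
        return set(whl.namelist())

old_files = wheel_files(OLD_WHEEL)
new_files = wheel_files(NEW_WHEEL)

print("Added files:  ", sorted(new_files - old_files))
print("Removed files:", sorted(old_files - new_files))
print("Kept files:   ", len(old_files & new_files))

The per-file +/- line counts shown above come from diffing the extracted file contents, which this sketch does not attempt.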
@@ -10,26 +10,25 @@ All exceptions exposed by the Vtl engine.
  centralised_messages = {
  # Input Validation errors
  "0-1-2-1": "Invalid json structure because additional properties have been supplied "
- "on file {filename}.",
+ "on file {filename}.",
  "0-1-2-2": "Errors found on file {filename}: {errors}",
  "0-1-2-3": "Component {component} is duplicated.",
  "0-1-2-4": "Invalid json structure because {err} on file {filename}.",
  "0-1-2-5": "File {file} must be encoded in utf-8 (without BOM).",
- # "0-1-2-5": "The library item {li}, used in this module {mdl}, is not found.",
  # JSON Schema validations
+ "0-3-1-1": "Dataset {dataset} is not valid according to JSON schema",
  # Infer Data Structure errors
- # "0-1-1-1": "A csv file or a dataframe is required.",
  "0-1-1-2": "The provided {source} must have data to can infer the data structure.",
  "0-1-1-3": "Can not infer data structure: {errors}.",
  "0-1-1-4": "On Dataset {name} loading: An identifier cannot have null values, found null "
- "values on {null_identifier}.",
+ "values on {null_identifier}.",
  "0-1-1-5": "On Dataset {name} loading: Datasets without identifiers must have 0 or "
- "1 datapoints.",
+ "1 datapoints.",
  "0-1-1-6": "Duplicated records. Combination of identifiers are repeated.",
  "0-1-1-7": "G1 - The provided CSV file is empty.",
  "0-1-1-8": "The following identifiers {ids} were not found , review file {file}.",
  "0-1-1-9": "You have a problem related with commas, review rfc4180 standard, review file "
- "{file}.",
+ "{file}.",
  "0-1-1-10": "On Dataset {name} loading: Component {comp_name} is missing in Datapoints.",
  "0-1-1-11": "Wrong data in the file for this scalardataset {name}.",
  "0-1-1-12": "On Dataset {name} loading: not possible to cast column {column} to {type}.",
@@ -38,74 +37,43 @@ centralised_messages = {
  "0-1-0-1": " Trying to redefine input datasets {dataset}.", # Semantic Error
  # ------------Operators-------------
  # General Semantic errors
- # "1-1-1-1": "At op {op}. Unable to validate types.",
  "1-1-1-1": "Invalid implicit cast from {type_1} to {type_2}.",
  "1-1-1-2": "Invalid implicit cast from {type_1} and {type_2} to {type_check}.",
  "1-1-1-3": "At op {op}: {entity} {name} cannot be promoted to {target_type}.",
- # "1-1-1-2": "At op {op}: Component {comp_name} type must be '{type_1}', found '{type_2}'.",
- # "1-1-1-3": "At op {op}: Invalid data type for Component {comp_name} and Scalar
- # {scalar_name}.",
  "1-1-1-4": "At op {op}: Operation not allowed for multimeasure datasets.",
- # "1-1-1-5": "At op {op}: Invalid data type {type} for Scalar {scalar_name}.",
- # TODO: Deprecated not in use, delete this.
- # "1-1-1-6": "At op {op}: Internal error: Not same parents.",
- # "1-1-1-7": "At op {op}: Invalid data type {type} for Component {name}.",
  "1-1-1-8": "At op {op}: Invalid Dataset {name}, no measures defined.",
  "1-1-1-9": "At op {op}: Invalid Dataset {name}, all measures must have the same type: {type}.",
  "1-1-1-10": "Component {comp_name} not found in Dataset {dataset_name}.",
- # "1-1-1-11": "At op {op}: Identifier {name} is specified more than once.",
- # "1-1-1-12": "At op {op}: Different scalar types for component {comp_name} and set
- # {set_name}.",
  "1-1-1-13": "At op {op}: Component {comp_name} role must be '{role_1}', found '{role_2}'.",
- # "1-1-1-14": "At op {op}: Dataset {name} type must be '{type_1}'.",
  "1-1-1-15": "At op {op}: Datasets {name_1} and {name_2} does not contain the same number of "
- "{type}.",
+ "{type}.",
  "1-1-1-16": "Found structure not nullable and null values.",
- # "1-1-1-17": "At op {op}: Problem with nullability for this components {name_1} and {name_2}.",
- # "1-1-1-18": "No {type} {value} found.",
- # "1-1-1-19": "At op {op}: Invalid data type for Scalar {scalar_name_1} and Scalar
- # {scalar_name_2}.",
  "1-1-1-20": "At op {op}: Only applies to datasets, instead of this a Scalar was provided.",
- # General Interpreter errors
- # "2-1-1-1": "At op {op}: Unable to evaluate.",
- # "2-1-1-2": "At op {op}: Dataset {name} is empty.",
- # TODO: Review this message, for unpivot for example we can't raise this error,
- # because we can have a empty dataset
- # "2-1-1-3": "At op {op}: No rules have results.",
  # Aggregate errors
- # TODO: Use error message 1-1-1-8
- # "1-1-2-1": "At op {op}: No measures found to aggregate.",
  "1-1-2-2": "At op {op}: Only Identifiers are allowed for grouping, "
- "found {id_name} - {id_type}.",
+ "found {id_name} - {id_type}.",
  "1-1-2-3": "Having component output type must be boolean, found {type}.",
- # "1-1-2-4": "At op {op}: Component {id_name} not found in dataset",
  # Analytic errors
- # TODO: Use error message 1-1-1-8
- # "1-1-3-1": "At op {op}: No measures found to analyse.",
  "1-1-3-2": "At op {op}: Only Identifiers are allowed for partitioning, "
- "found {id_name} - {id_type}.",
+ "found {id_name} - {id_type}.",
  # Cast errors
  "1-1-5-1": "Type {type_1}, cannot be cast to {type_2}.",
  "1-1-5-3": "Impossible to cast from type {type_1} to {type_2}, without providing a mask.",
  "1-1-5-4": "Invalid mask to cast from type {type_1} to {type_2}.",
  "1-1-5-5": "A mask can't be provided to cast from type {type_1} to {type_2}. Mask provided: "
- "{mask_value}.",
+ "{mask_value}.",
  "2-1-5-1": "Impossible to cast {value} from type {type_1} to {type_2}.",
  # Clause errors
- # "1-1-6-1": "At op {op}: Component {comp_name} not found in dataset {dataset_name}.",
  "1-1-6-2": "At op {op}: The identifier {name} in dataset {dataset} could not be included "
- "in the {op} op.",
- # TODO: This is not possible at all, as calc clause adds a new column and
- # identifiers are still unique
- # "1-1-6-3": "Found duplicated values on identifiers after Calc clause.",
+ "in the {op} op.",
  "1-1-6-4": "At op {op}: Alias symbol cannot have the name of a component symbol: "
- "{symbol_name} - {comp_name}.",
+ "{symbol_name} - {comp_name}.",
  "1-1-6-5": "At op {op}: Scalar values are not allowed at sub operator, found {name}.",
  "1-1-6-6": "Membership is not allowed inside a clause, found {dataset_name}#{comp_name}.",
  "1-1-6-7": "Cannot use component {comp_name} as it was generated in another calc expression.",
  # all the components used in calccomp must belong to the operand dataset
  "1-1-6-8": "Cannot use component {comp_name} for rename, it is already in the dataset "
- "{dataset_name}.",
+ "{dataset_name}.",
  # it is the same error that 1-1-8-1 AND similar but not the same 1-3-1
  "1-1-6-9": "At op {op}: The following components are repeated: {from_components}.",
  "1-1-6-10": "At op {op}: Component {operand} in dataset {dataset_name} is not an identifier",
@@ -113,28 +81,27 @@ centralised_messages = {
  # it is the same as the one that appears in joins, but are differents kinds of failures
  "1-1-6-12": "At op {op}: Not allowed to drop the last element.",
  "1-1-6-13": "At op {op}: Not allowed to overwrite an identifier: {comp_name}",
- # "1-1-6-15": "At op {op}: Component {comp_name} already exists in dataset {dataset_name}",
  # Comparison errors
  "1-1-7-1": "At op {op}: Value in {left_name} of type {left_type} is not comparable to value "
- "{right_name} of type {right_type}.",
+ "{right_name} of type {right_type}.",
  # Conditional errors
  "1-1-9-1": "At op {op}: The evaluation condition must result in a Boolean "
- "expression, found '{type}'.",
+ "expression, found '{type}'.",
  "1-1-9-3": "At op {op}: Then clause {then_name} and else clause {else_name}, both must be "
- "Scalars.",
+ "Scalars.",
  "1-1-9-4": "At op {op}: The condition dataset {name} must contain an unique measure.",
  "1-1-9-5": "At op {op}: The condition dataset Measure must be a Boolean, found '{type}'.",
  "1-1-9-6": "At op {op}: Then-else datasets have different number of identifiers compared "
- "with condition dataset.",
+ "with condition dataset.",
  "1-1-9-9": "At op {op}: {clause} component {clause_name} role must be {role_1}, found "
- "{role_2}.",
+ "{role_2}.",
  "1-1-9-10": "At op {op}: {clause} dataset have different number of identifiers compared with "
- "condition dataset.",
+ "condition dataset.",
  "1-1-9-11": "At op {op}: Condition component {name} must be Boolean, found {type}.",
  "1-1-9-12": "At op {op}: then clause {then_symbol} and else clause {else_symbol}, both must "
- "be Datasets or at least one of them a Scalar.",
+ "be Datasets or at least one of them a Scalar.",
  "1-1-9-13": "At op {op}: then {then} and else {else_clause} datasets must contain the same "
- "number of components.",
+ "number of components.",
  "2-1-9-1": "At op {op}: Condition operators must have the same operator type.",
  "2-1-9-2": "At op {op}: Condition {name} it's not a boolean.",
  "2-1-9-3": "At op {op}: All then and else operands must be scalars.",
@@ -146,68 +113,66 @@ centralised_messages = {
  "1-1-10-1": "At op {op}: The {op_type} operand must have exactly one measure of type {me_type}",
  "1-1-10-2": "At op {op}: Number of variable has to be equal between the call and signature.",
  "1-1-10-3": "At op {op}: Name in the call {found} has to be equal to variable rule in "
- "signature {expected}.",
+ "signature {expected}.",
  "1-1-10-4": "At op {op}: When a hierarchical ruleset is defined for value domain, it is "
- "necessary to specify the component with the rule clause on call.",
+ "necessary to specify the component with the rule clause on call.",
  "1-1-10-5": "No rules to analyze on Hierarchy Roll-up as rules have no = operator.",
  "1-1-10-6": "At op {op}: Name in the call {found} has to be equal to variable condition in "
- "signature {expected} .",
+ "signature {expected} .",
  "1-1-10-7": "Not found component {comp_name} on signature.",
  "1-1-10-8": "At op {op}: Measures involved have to be numerical, other types found {found}.",
  "1-1-10-9": "Invalid signature for the ruleset {ruleset}. On variables, condComp and "
- "ruleComp must be the same",
+ "ruleComp must be the same",
  # General Operators
- # "1-1-12-1": "At op {op}: You could not recalculate the identifier {name} on dataset "
- # "{dataset}.",
- # "2-1-12-1": "At op {op}: Create a null measure without a scalar type is not allowed. "
- # "Please use cast operator.",
+ "2-1-12-1": "At op {op}: Create a null measure without a scalar type is not allowed."
+ "Please use cast operator.",
  # Join Operators
  "1-1-13-1": "At op {op}: Duplicated alias {duplicates}.",
  "1-1-13-2": "At op {op}: Missing mandatory aliasing.",
  "1-1-13-3": "At op {op}: Join conflict with duplicated names for column {name} from original "
- "datasets.",
+ "datasets.",
  "1-1-13-4": "At op {op}: Using clause, using={using_names}, does not define all the "
- "identifiers, of non reference dataset {dataset}.",
+ "identifiers, of non reference dataset {dataset}.",
  "1-1-13-5": "At op {op}: Invalid subcase B1, All the datasets must share as identifiers the "
- "using ones.",
+ "using ones.",
  # not in use but we keep for later, in use 1-1-13-4
  "1-1-13-6": "At op {op}: Invalid subcase B2, All the declared using components "
- "'{using_components}' must be present as components in the reference dataset "
- "'{reference}'.",
+ "'{using_components}' must be present as components in the reference dataset "
+ "'{reference}'.",
  "1-1-13-7": "At op {op}: Invalid subcase B2, All the non reference datasets must share as "
- "identifiers the using ones.",
+ "identifiers the using ones.",
  "1-1-13-8": "At op {op}: No available using clause.",
  "1-1-13-9": "Ambiguity for this variable {comp_name} inside a join clause.",
  "1-1-13-10": "The join operator does not perform scalar/component operations.",
  "1-1-13-11": "At op {op}: Invalid subcase A, {dataset_reference} should be a superset but "
- "{component} not found.",
+ "{component} not found.",
  # inner_join and left join
  "1-1-13-12": "At op {op}: Invalid subcase A. There are different identifiers for the provided "
- "datasets",
+ "datasets",
  # full_join
  "1-1-13-13": "At op {op}: Invalid subcase A. There are not same number of identifiers for the "
- "provided datasets",
+ "provided datasets",
  # full_join
  "1-1-13-14": "Cannot perform a join over a Dataset Without Identifiers: {name}.",
  "1-1-13-15": "At op {op}: {comp_name} has to be a Measure for all the provided datasets inside "
- "the join",
+ "the join",
  "1-1-13-16": "At op {op}: Invalid use, please review : {msg}.",
  "1-1-13-17": "At op {op}: {comp_name} not present in the dataset(result from join VDS) at the "
- "time it is called",
+ "time it is called",
  # Operators general errors
  "1-1-14-1": "At op {op}: Measure names don't match: {left} - {right}.",
  "1-1-14-3": "At op {op}: Invalid scalar types for identifiers at DataSet {dataset}. One {type} "
- "identifier expected, {count} found.",
+ "identifier expected, {count} found.",
  "1-1-14-5": "At op {op}: {names} with type/s {types} is not compatible with {op}",
  "1-1-14-6": "At op {op}: {comp_name} with type {comp_type} and scalar_set with type "
- "{scalar_type} is not compatible with {op}",
+ "{scalar_type} is not compatible with {op}",
  # "1-1-14-8": "At op {op}: Operation not allowed for multimeasure datasets.",
  "1-1-14-9": "At op {op}: {names} with type/s {types} is not compatible with {op} on datasets "
- "{datasets}.",
+ "{datasets}.",
  # Numeric Operators
  "1-1-15-8": "At op {op}: {op} operator cannot have a {comp_type} as parameter.",
  "2-1-15-1": "At op {op}: Component {comp_name} from dataset {dataset_name} contains negative "
- "values.",
+ "values.",
  "2-1-15-2": "At op {op}: Value {value} could not be negative.",
  "2-1-15-3": "At op {op}: Base value {value} could not be less or equal 0.",
  "2-1-15-4": "At op {op}: Invalid values in Component {name}.",
@@ -216,9 +181,9 @@ centralised_messages = {
  "2-1-15-7": "At op {op}: {op} operator cannot be a dataset.",
  # Set Operators
  "1-1-17-1": "At op {op}: Datasets {dataset_1} and {dataset_2} have different number of "
- "components",
+ "components",
  # String Operators
- # "1-1-18-1": "At op {op}: Invalid Dataset {name}. Dataset with one measure expected.",
+ "1-1-18-1": "At op {op}: Invalid Dataset {name}. Dataset with one measure expected.",
  "1-1-18-2": "At op {op}: Composition of DataSet and Component is not allowed.",
  "1-1-18-3": "At op {op}: Invalid parameter position: {pos}.",
  "1-1-18-4": "At op {op}: {param_type} parameter should be {correct_type}.",
@@ -230,12 +195,12 @@ centralised_messages = {
  "1-1-19-2": "At op {op}: Unknown date type for {op}.",
  "1-1-19-3": "At op {op}: Invalid {param} for {op}.",
  "1-1-19-4": "At op {op}: Invalid values {value_1} and {value_2}, periodIndTo parameter must be "
- "a larger duration value than periodIndFrom parameter.",
+ "a larger duration value than periodIndFrom parameter.",
  "1-1-19-5": "At op {op}: periodIndTo parameter must be a larger duration value than the values "
- "to aggregate.",
+ "to aggregate.",
  "1-1-19-6": "At op {op}: Time type used in the component {comp} is not supported.",
  "1-1-19-7": "At op {op}: can be applied only on Data Sets (of time series) and returns a Data "
- "Set (of time series).",
+ "Set (of time series).",
  # flow_to_stock, stock_to_flow
  "1-1-19-8": "At op {op}: {op} can only be applied to a {comp_type}",
  "1-1-19-9": "At op {op}: {op} can only be applied to a {comp_type} with a {param}",
@@ -243,38 +208,35 @@ centralised_messages = {
  "1-1-19-10": "{op} can only be applied to operands with data type as Date or Time Period",
  # Other time operators
  "2-1-19-1": "At op {op}: Invalid values {value_1} and {value_2} for duration, "
- "periodIndTo parameter must be a larger duration value than the "
- "values to aggregate.",
+ "periodIndTo parameter must be a larger duration value than the "
+ "values to aggregate.",
  "2-1-19-2": "Invalid period indicator {period}.",
  "2-1-19-3": "Only same period indicator allowed for both parameters ({period1} != {period2}).",
  "2-1-19-4": "Date setter, ({value} > {date}). Cannot set date1 with a value higher than date2.",
  "2-1-19-5": "Date setter, ({value} < {date}). Cannot set date2 with a value lower than date1.",
  "2-1-19-6": "Invalid period format, must be YYYY-(L)NNN: {period_format}",
  "2-1-19-7": "Period Number must be between 1 and {periods} for period indicator "
- "{period_indicator}.",
+ "{period_indicator}.",
  "2-1-19-8": "Invalid date format, must be YYYY-MM-DD: {date}",
  "2-1-19-9": "Invalid day {day} for year {year}.",
  "2-1-19-10": "Invalid year {year}, must be between 1900 and 9999.",
  "2-1-19-11": "{op} operator is not compatible with time values",
- "2-1-19-12": "At op {op}: Invalid param type {type} for param {name}, "
- "expected {expected}.",
+ "2-1-19-12": "At op {op}: Invalid param type {type} for param {name}, expected {expected}.",
  "2-1-19-13": "At op {op}: Invalid param data_type {type} for param {name}, "
- "expected {expected}.",
+ "expected {expected}.",
  "2-1-19-14": "At op {op}: Invalid dataset {name}, requires at least one Date/Time_Period "
- "measure.",
- "2-1-19-15": "{op} can only be applied according to the following mask: PY/YDDD/D",
- "2-1-19-16": "{op} can only be applied according to the following mask: PM/MDD/D",
- "2-1-19-17": "{op} can only be positive numbers",
+ "measure.",
+ "2-1-19-15": "{op} can only be applied according to the iso 8601 format mask",
+ "2-1-19-16": "{op} can only be positive numbers",
  # ----------- Interpreter Common ------
  "2-3-1": "{comp_type} {comp_name} not found.",
  "2-3-2": "{op_type} cannot be used with {node_op} operators.",
- # "2-3-3": "Internal error: Not able to categorize {value}.",
  "2-3-4": "{op} operator must have a {comp}",
  "2-3-5": "Expected {param_type}, got {type_name} on UDO {op}, parameter {param_name}",
  "2-3-6": "Dataset {dataset_name} not found, please check input datastructures",
  "2-3-9": "{comp_type} {comp_name} not found in {param}.",
  "2-3-10": "No {comp_type} have been defined.",
- # "2-3-11": "{pos} operand must be a dataset.",
+ "2-3-11": "{pos} operand must be a dataset.",
  # ---------Semantic Analyzer Common----
  "1-3-1": "Please don't use twice {alias} like var_to.",
  "1-3-3": "Overwriting a dataset/variable is not allowed, trying it with {varId_value}.",
@@ -285,7 +247,6 @@ centralised_messages = {
  "1-3-10": "Not valid set declaration, mixed scalar types {scalar_1} and {scalar_2}.",
  "1-3-12": "Default arguments cannot be followed by non-default arguments.",
  "1-3-15": "Missing datastructure definition for required input Dataset {input}.",
- # "1-3-16": "Component {name} not found.",
  "1-3-17": "Operations without output assigned are not available.",
  "1-3-19": "No {node_type} {node_value} found.",
  "1-3-20": "RuleComp of Hierarchical Ruleset can only be an identifier, {name} is a {role}.",
@@ -293,13 +254,13 @@ centralised_messages = {
  "1-3-22": "Unable to categorize {node_value}.",
  "1-3-23": "Missing value domain '{name}' definition, please provide an structure.",
  "1-3-24": "Internal error on Analytic operators inside a calc, No partition or "
- "order symbol found.",
+ "order symbol found.",
  "1-3-26": "Value domain {name} not found.",
  "1-3-27": "Dataset without identifiers are not allowed in {op} operator.",
  "1-3-28": "At op {op}: invalid number of parameters: received {received}, expected at "
- "least: {expected}",
+ "least: {expected}",
  "1-3-29": "At op {op}: can not use user defined operator that returns a component outside "
- "clause operator or rule",
+ "clause operator or rule",
  "1-3-30": "At op {op}: too many parameters: received {received}, expected: {expected}",
  "1-3-31": "Cannot use component {name} outside an aggregate function in a having clause.",
  "1-3-32": "Cannot perform operation {op} inside having clause.",
@@ -311,25 +272,24 @@ centralised_messages = {
  "1-4-1-1": "At op {op}: User defined {option} declared as {type_1}, found {type_2}.",
  "1-4-1-2": "Using variable {value}, not defined at {op} definition.",
  "1-4-1-3": "At op {op}: using variable {value}, not defined as an argument.",
- "1-4-1-4": "Found duplicates at arguments naming, please review {type} " "definition {op}.",
+ "1-4-1-4": "Found duplicates at arguments naming, please review {type} definition {op}.",
  "1-4-1-5": "Found duplicates at rule naming: {names}. Please review {type} "
- "{ruleset_name} definition.",
+ "{ruleset_name} definition.",
  "1-4-1-6": "At op {op}: Arguments incoherence, {defined} defined {passed} passed.",
  "1-4-1-7": "All rules must be named or not named, but found mixed criteria at {type} "
- "definition {name}.",
+ "definition {name}.",
  "1-4-1-8": "All rules must have different code items in the left side of '=' in hierarchy "
- "operator at hierachical ruleset definition {name}.",
+ "operator at hierachical ruleset definition {name}.",
  "1-4-1-9": "At op check_datapoint: {name} has an invalid datatype expected DataSet, found "
- "Scalar.",
+ "Scalar.",
  # AST Creation
  "1-4-2-1": "Eval could not be called without a {option} type definition.",
  "1-4-2-2": "Optional or empty expression node is not allowed in time_agg.",
  "1-4-2-3": "{value} could not be called in the count.",
  "1-4-2-4": "At op {op}: Only one order_by element must be used in Analytic with range "
- "windowing.",
+ "windowing.",
  "1-4-2-5": "At op {op}: User defined operator without returns is not implemented.",
  "1-4-2-6": "At op {op}: Window must be provided.",
  "1-4-2-7": "At op {op}: Partition by or order by clause must be provided for Analytic "
- "operators.",
- # Not Implemented Error
+ "operators.",
  }
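The hunks above only add, remove, or reflow entries of centralised_messages in vtlengine/Exceptions/messages.py; each message is a plain str.format template keyed by an error code. A minimal sketch of how such a template can be rendered into an error text, using a two-entry excerpt copied from the diff (the render helper is illustrative, not the engine's actual error-raising code):

# Excerpt of the dictionary shown in the diff; the full mapping lives in
# vtlengine/Exceptions/messages.py as `centralised_messages`.
centralised_messages_excerpt = {
    "0-1-2-3": "Component {component} is duplicated.",
    "0-3-1-1": "Dataset {dataset} is not valid according to JSON schema",  # added in 1.1rc1
}

def render(code: str, **kwargs) -> str:
    # Fill the {placeholders} of the template with the supplied keyword arguments.
    return f"{code}: " + centralised_messages_excerpt[code].format(**kwargs)

print(render("0-1-2-3", component="OBS_VALUE"))  # -> 0-1-2-3: Component OBS_VALUE is duplicated.
print(render("0-3-1-1", dataset="DS_1"))         # -> 0-3-1-1: Dataset DS_1 is not valid according to JSON schema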