vtlengine 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of vtlengine has been flagged as possibly problematic.
- vtlengine/API/_InternalApi.py +19 -8
- vtlengine/API/__init__.py +9 -9
- vtlengine/AST/ASTConstructor.py +23 -43
- vtlengine/AST/ASTConstructorModules/Expr.py +147 -71
- vtlengine/AST/ASTConstructorModules/ExprComponents.py +104 -40
- vtlengine/AST/ASTConstructorModules/Terminals.py +28 -39
- vtlengine/AST/ASTTemplate.py +16 -1
- vtlengine/AST/DAG/__init__.py +12 -15
- vtlengine/AST/Grammar/Vtl.g4 +49 -20
- vtlengine/AST/Grammar/VtlTokens.g4 +13 -1
- vtlengine/AST/Grammar/lexer.py +1293 -1183
- vtlengine/AST/Grammar/parser.py +5758 -3939
- vtlengine/AST/Grammar/tokens.py +12 -0
- vtlengine/AST/VtlVisitor.py +9 -2
- vtlengine/AST/__init__.py +21 -3
- vtlengine/DataTypes/TimeHandling.py +12 -7
- vtlengine/DataTypes/__init__.py +17 -24
- vtlengine/Exceptions/__init__.py +43 -1
- vtlengine/Exceptions/messages.py +82 -62
- vtlengine/Interpreter/__init__.py +125 -120
- vtlengine/Model/__init__.py +17 -12
- vtlengine/Operators/Aggregation.py +14 -14
- vtlengine/Operators/Analytic.py +56 -31
- vtlengine/Operators/Assignment.py +2 -3
- vtlengine/Operators/Boolean.py +5 -7
- vtlengine/Operators/CastOperator.py +12 -13
- vtlengine/Operators/Clause.py +11 -13
- vtlengine/Operators/Comparison.py +31 -17
- vtlengine/Operators/Conditional.py +157 -17
- vtlengine/Operators/General.py +4 -4
- vtlengine/Operators/HROperators.py +41 -34
- vtlengine/Operators/Join.py +18 -22
- vtlengine/Operators/Numeric.py +76 -39
- vtlengine/Operators/RoleSetter.py +6 -8
- vtlengine/Operators/Set.py +7 -12
- vtlengine/Operators/String.py +19 -27
- vtlengine/Operators/Time.py +366 -43
- vtlengine/Operators/Validation.py +4 -7
- vtlengine/Operators/__init__.py +38 -41
- vtlengine/Utils/__init__.py +149 -94
- vtlengine/__init__.py +1 -1
- vtlengine/files/output/__init__.py +2 -2
- vtlengine/files/output/_time_period_representation.py +0 -1
- vtlengine/files/parser/__init__.py +18 -18
- vtlengine/files/parser/_time_checking.py +3 -2
- {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/METADATA +17 -5
- vtlengine-1.0.3.dist-info/RECORD +58 -0
- vtlengine-1.0.1.dist-info/RECORD +0 -58
- {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/LICENSE.md +0 -0
- {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/WHEEL +0 -0
vtlengine/AST/Grammar/tokens.py
CHANGED
@@ -32,6 +32,7 @@ LN = "ln"
 POWER = "power"
 LOG = "log"
 SQRT = "sqrt"
+RANDOM = "random"
 # Boolean operators.
 AND = "and"
 OR = "or"
@@ -70,6 +71,7 @@ SYMDIFF = "symdiff"
 IF = "if"
 THEN = "then"
 ELSE = "else"
+CASE = "case"
 NVL = "nvl"
 # Clause Operators.
 FILTER = "filter"
@@ -89,6 +91,16 @@ STOCK_TO_FLOW = "stock_to_flow"
 TIMESHIFT = "timeshift"
 TIME_AGG = "time_agg"
 CURRENT_DATE = "current_date"
+DATEDIFF = "datediff"
+DATE_ADD = "dateadd"
+YEAR = "year"
+MONTH = "month"
+DAYOFMONTH = "dayofmonth"
+DAYOFYEAR = "dayofyear"
+DAYTOYEAR = "daytoyear"
+DAYTOMONTH = "daytomonth"
+YEARTODAY = "yeartoday"
+MONTHTODAY = "monthtoday"
 # Join Operators.
 INNER_JOIN = "inner_join"
 LEFT_JOIN = "left_join"
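These tokens back the new VTL time operators added in this release (see Operators/Time.py, +366 -43 in the file list above). As a rough orientation only, here is a stand-alone Python sketch of what the date-extraction operators compute; the engine's actual semantics and return types are defined in Operators/Time.py, so treat this as an illustration, not the engine's code:

from datetime import date

d1, d2 = date(2022, 3, 15), date(2023, 1, 1)

year_part = d1.year                    # year(d1)         -> 2022
month_part = d1.month                  # month(d1)        -> 3
day_of_month = d1.day                  # dayofmonth(d1)   -> 15
day_of_year = d1.timetuple().tm_yday   # dayofyear(d1)    -> 74
date_diff = (d2 - d1).days             # datediff(d1, d2) -> 292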
vtlengine/AST/VtlVisitor.py
CHANGED
@@ -2,7 +2,6 @@ from antlr4 import ParseTreeVisitor
 
 from vtlengine.AST.Grammar.parser import Parser
 
-
 # This class defines a complete generic visitor for a parse tree produced by Parser.
 
 
@@ -62,6 +61,10 @@ class VtlVisitor(ParseTreeVisitor):
     def visitIfExpr(self, ctx: Parser.IfExprContext):
         return self.visitChildren(ctx)
 
+    # Visit a parse tree produced by Parser#caseExpr.
+    def visitCaseExpr(self, ctx: Parser.CaseExprContext):
+        return self.visitChildren(ctx)
+
     # Visit a parse tree produced by Parser#clauseExpr.
     def visitClauseExpr(self, ctx: Parser.ClauseExprContext):
         return self.visitChildren(ctx)
@@ -90,6 +93,10 @@ class VtlVisitor(ParseTreeVisitor):
     def visitIfExprComp(self, ctx: Parser.IfExprCompContext):
         return self.visitChildren(ctx)
 
+    # Visit a parse tree produced by Parser#caseExprComp.
+    def visitCaseExprComp(self, ctx: Parser.CaseExprCompContext):
+        return self.visitChildren(ctx)
+
     # Visit a parse tree produced by Parser#comparisonExprComp.
     def visitComparisonExprComp(self, ctx: Parser.ComparisonExprCompContext):
         return self.visitChildren(ctx)
@@ -399,7 +406,7 @@
         return self.visitChildren(ctx)
 
     # Visit a parse tree produced by Parser#periodAtomComponent.
-    def
+    def visitTimeUnaryAtomComponent(self, ctx: Parser.PeriodAtomComponentContext):
         return self.visitChildren(ctx)
 
     # Visit a parse tree produced by Parser#fillTimeAtomComponent.
vtlengine/AST/__init__.py
CHANGED
@@ -11,7 +11,6 @@ from dataclasses import dataclass
 from typing import Any, Dict, List, Optional, Type, Union
 
 from vtlengine.DataTypes import ScalarType
-
 from vtlengine.Model import Role
 
 
@@ -33,7 +32,7 @@ class AST:
         """Returns a human-friendly description."""
         out = []
         name = self.__class__.__name__
-        for k in self.__all_annotations()
+        for k in self.__all_annotations():
             v = self.__getattribute__(k)
             if v:
                 out.append(f"{k}={str(v)}")
@@ -41,7 +40,7 @@ class AST:
 
     def toJSON(self):
         base = {"class_name": self.__class__.__name__}
-        for k in self.__all_annotations()
+        for k in self.__all_annotations():
             v = self.__getattribute__(k)
             base[k] = v
         return base
@@ -344,6 +343,25 @@ class If(AST):
     elseOp: AST
 
 
+class CaseObj:
+    condition: AST
+    thenOp: AST
+
+    def __init__(self, condition: AST, thenOp: AST):
+        self.condition = condition
+        self.thenOp = thenOp
+
+
+@dataclass
+class Case(AST):
+    """
+    Case: (condition, thenOp, elseOp)
+    """
+
+    cases: List[CaseObj]
+    elseOp: AST
+
+
 @dataclass
 class Validation(AST):
     """
vtlengine/DataTypes/TimeHandling.py
CHANGED
@@ -1,8 +1,9 @@
 import calendar
 import copy
 import operator
-from datetime import date
-from
+from datetime import date
+from datetime import datetime as dt
+from typing import Any, Dict, Optional, Union
 
 import pandas as pd
 
@@ -31,9 +32,11 @@ def date_to_period(date_value: date, period_indicator: str) -> Any:
         return TimePeriodHandler(f"{date_value.year}D{date_value.timetuple().tm_yday}")
 
 
-def period_to_date(
-
-
+def period_to_date(year: int,
+                   period_indicator: str,
+                   period_number: int,
+                   start: bool = False
+                   ) -> date:
     if period_indicator == "A":
         return date(year, 1, 1) if start else date(year, 12, 31)
     periods = {
@@ -145,6 +148,8 @@ class TimePeriodHandler:
    _period_number: int
 
    def __init__(self, period: str) -> None:
+        if isinstance(period, int):
+            period = str(period)
        if "-" in period:
            self.year, self.period_indicator, self.period_number = (
                from_input_customer_support_to_internal(period)
@@ -207,7 +212,7 @@ class TimePeriodHandler:
             raise SemanticError(
                 "2-1-19-7",
                 periods=PeriodDuration.periods[self.period_indicator],
-
+                period_indicator=self.period_indicator,
             )
             # raise ValueError(f'Period Number must be between 1 and '
             #                  f'{PeriodDuration.periods[self.period_indicator]} '
@@ -481,7 +486,7 @@ def check_max_date(str_: Optional[str]) -> Optional[str]:
 
     # Format 2010-01-01. Prevent passthrough of other ISO 8601 formats.
     if len(str_) != 10 or str_[7] != "-":
-        raise SemanticError("2-1-19-8", date=
+        raise SemanticError("2-1-19-8", date=str_)
         # raise ValueError(f"Invalid date format, must be YYYY-MM-DD: {str_}")
 
     result = date.fromisoformat(str_)
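period_to_date now takes the period parts explicitly and returns a boundary date of the period. A stand-alone sketch of the annual branch visible in the hunk; the other indicators come from the periods table that follows in the real function and are elided here:

from datetime import date

def period_to_date(year: int, period_indicator: str,
                   period_number: int, start: bool = False) -> date:
    # Annual branch exactly as in the hunk above.
    if period_indicator == "A":
        return date(year, 1, 1) if start else date(year, 12, 31)
    raise NotImplementedError("non-annual indicators elided in this sketch")

assert period_to_date(2023, "A", 1, start=True) == date(2023, 1, 1)
assert period_to_date(2023, "A", 1) == date(2023, 12, 31)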
vtlengine/DataTypes/__init__.py
CHANGED
@@ -1,7 +1,8 @@
-from typing import Any,
+from typing import Any, Dict, Optional, Set, Type, Union
+
 import pandas as pd
 
-from vtlengine.DataTypes.TimeHandling import
+from vtlengine.DataTypes.TimeHandling import check_max_date, date_to_period_str, str_period_to_date
 from vtlengine.Exceptions import SemanticError
 
 DTYPE_MAPPING: Dict[str, str] = {
@@ -310,7 +311,8 @@ class TimeInterval(ScalarType):
 
     @classmethod
     def explicit_cast(cls, value: Any, from_type: Any) -> Any:
-
+        if from_type == String:
+            return value  # check_time(value). TODO: resolve this to avoid a circular import.
         raise SemanticError(
             "2-1-5-1",
             value=value,
@@ -328,7 +330,7 @@ class Date(TimeInterval):
     def implicit_cast(cls, value: Any, from_type: Any) -> Any:
         # TODO: Remove String, only for compatibility with previous engine
         if from_type in {Date, String}:
-            return value
+            return check_max_date(value)
 
         raise SemanticError(
             "2-1-5-1",
@@ -341,7 +343,7 @@ class Date(TimeInterval):
     def explicit_cast(cls, value: Any, from_type: Any) -> Any:
         # TODO: Remove String, only for compatibility with previous engine
         if from_type == String:
-            return value
+            return check_max_date(value)
 
         raise SemanticError(
             "2-1-5-1",
@@ -384,7 +386,7 @@ class TimePeriod(TimeInterval):
             return period_str
         # TODO: Remove String, only for compatibility with previous engine
         elif from_type == String:
-            return value
+            return value  # check_time_period(value) TODO: resolve this to avoid a circular import.
 
         raise SemanticError(
             "2-1-5-1",
@@ -442,15 +444,9 @@ class Boolean(ScalarType):
         else:
             return None
         if isinstance(value, int):
-
-            return True
-        else:
-            return False
+            return value != 0
         if isinstance(value, float):
-
-            return True
-        else:
-            return False
+            return value != 0.0
         if isinstance(value, bool):
             return value
         return value
@@ -470,9 +466,7 @@
     @classmethod
     def explicit_cast(cls, value: Any, from_type: Any) -> bool:
         if from_type in {Number, Integer}:
-
-            return False
-            return True
+            return value not in {0}
 
         raise SemanticError(
             "2-1-5-1",
@@ -672,13 +666,12 @@ def unary_implicit_promotion(
     return: The resulting type of the operation, after the implicit type promotion
     """
     operand_implicities = IMPLICIT_TYPE_PROMOTION_MAPPING[operand_type]
-    if type_to_check:
-
-
-
-
-
-        )
+    if type_to_check and not type_to_check.is_included(operand_implicities):
+        raise SemanticError(
+            code="1-1-1-1",
+            type_1=SCALAR_TYPES_CLASS_REVERSE[operand_type],
+            type_2=SCALAR_TYPES_CLASS_REVERSE[type_to_check],
+        )
     if return_type:
         return return_type
     if (
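The Boolean casts collapse the old hard-coded True/False branches into direct comparisons: any non-zero int or float maps to True, zero maps to False. A stand-alone sketch of the resulting behaviour:

def to_bool(value):
    # bool is checked first here only for readability; in Python bool is a
    # subclass of int, so the int-first ordering in the hunk gives the same
    # results (True != 0 -> True, False != 0 -> False).
    if isinstance(value, bool):
        return value
    if isinstance(value, int):
        return value != 0
    if isinstance(value, float):
        return value != 0.0
    return value

assert to_bool(0) is False and to_bool(2) is True
assert to_bool(0.0) is False and to_bool(-1.5) is True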
vtlengine/Exceptions/__init__.py
CHANGED
@@ -7,7 +7,8 @@ Description
 All exceptions exposed by the Vtl engine.
 """
 
-from typing import
+from typing import Any, List, Optional
+
 from vtlengine.Exceptions.messages import centralised_messages
 
 dataset_output = None
@@ -130,3 +131,44 @@
             super().__init__(message, lino, colno, code)
         else:
             super().__init__(message, lino, colno)
+
+
+def check_key(field: str, dict_keys: Any, key: str) -> None:
+    if key not in dict_keys:
+        closest_key = find_closest_key(dict_keys, key)
+        message_append = f". Did you mean {closest_key}?" if closest_key else ""
+        raise SemanticError("0-1-1-13", field=field, key=key, closest_key=message_append)
+
+
+def find_closest_key(dict_keys: Any, key: str) -> Optional[str]:
+    closest_key = None
+    max_distance = 3
+    min_distance = float('inf')
+
+    for dict_key in dict_keys:
+        distance = key_distance(key, dict_key)
+        if distance < min_distance:
+            min_distance = distance
+            closest_key = dict_key
+
+    if min_distance <= max_distance:
+        return closest_key
+    return None
+
+
+def key_distance(key: str, objetive: str) -> int:
+    dp = [[0] * (len(objetive) + 1) for _ in range(len(key) + 1)]
+
+    for i in range(len(key) + 1):
+        dp[i][0] = i
+    for j in range(len(objetive) + 1):
+        dp[0][j] = j
+
+    for i in range(1, len(key) + 1):
+        for j in range(1, len(objetive) + 1):
+            cost = 0 if key[i - 1] == objetive[j - 1] else 1
+            dp[i][j] = min(dp[i - 1][j] + 1,
+                           dp[i][j - 1] + 1,
+                           dp[i - 1][j - 1] + cost)
+
+    return dp[-1][-1]
vtlengine/Exceptions/messages.py
CHANGED
@@ -10,7 +10,7 @@ All exceptions exposed by the Vtl engine.
 centralised_messages = {
     # Input Validation errors
     "0-1-2-1": "Invalid json structure because additional properties have been supplied "
-
+    "on file {filename}.",
     "0-1-2-2": "Errors found on file {filename}: {errors}",
     "0-1-2-3": "Component {component} is duplicated.",
     "0-1-2-4": "Invalid json structure because {err} on file {filename}.",
@@ -20,21 +20,22 @@ centralised_messages = {
     # Infer Data Structure errors
     # "0-1-1-1": "A csv file or a dataframe is required.",
     "0-1-1-2": "The provided {source} must have data to can infer the data structure.",
-    "0-1-1-3": "Can not infer data structure: {errors}",
+    "0-1-1-3": "Can not infer data structure: {errors}.",
     "0-1-1-4": "On Dataset {name} loading: An identifier cannot have null values, found null "
-
+    "values on {null_identifier}.",
     "0-1-1-5": "On Dataset {name} loading: Datasets without identifiers must have 0 or "
-
+    "1 datapoints.",
     "0-1-1-6": "Duplicated records. Combination of identifiers are repeated.",
-    "0-1-1-7": "G1 - The provided CSV file is empty",
-    "0-1-1-8": "The following identifiers {ids} were not found , review file {file}",
+    "0-1-1-7": "G1 - The provided CSV file is empty.",
+    "0-1-1-8": "The following identifiers {ids} were not found , review file {file}.",
     "0-1-1-9": "You have a problem related with commas, review rfc4180 standard, review file "
-
+    "{file}.",
     "0-1-1-10": "On Dataset {name} loading: Component {comp_name} is missing in Datapoints.",
-    "0-1-1-11": "Wrong data in the file for this scalardataset {name}",
-    "0-1-1-12": "On Dataset {name} loading: not possible to cast column {column} to {type}",
-
-    "0-1-
+    "0-1-1-11": "Wrong data in the file for this scalardataset {name}.",
+    "0-1-1-12": "On Dataset {name} loading: not possible to cast column {column} to {type}.",
+    "0-1-1-13": "Invalid key on {field} field: {key}{closest_key}.",
+    "0-1-1-14": "Empty datasets {dataset1} and {dataset2} shape missmatch.",
+    "0-1-0-1": " Trying to redefine input datasets {dataset}.",  # Semantic Error
     # ------------Operators-------------
     # General Semantic errors
     # "1-1-1-1": "At op {op}. Unable to validate types.",
@@ -58,7 +59,7 @@
     "1-1-1-13": "At op {op}: Component {comp_name} role must be '{role_1}', found '{role_2}'.",
     # "1-1-1-14": "At op {op}: Dataset {name} type must be '{type_1}'.",
     "1-1-1-15": "At op {op}: Datasets {name_1} and {name_2} does not contain the same number of "
-
+    "{type}.",
     "1-1-1-16": "Found structure not nullable and null values.",
     # "1-1-1-17": "At op {op}: Problem with nullability for this components {name_1} and {name_2}.",
     # "1-1-1-18": "No {type} {value} found.",
@@ -75,36 +76,36 @@
     # TODO: Use error message 1-1-1-8
     # "1-1-2-1": "At op {op}: No measures found to aggregate.",
     "1-1-2-2": "At op {op}: Only Identifiers are allowed for grouping, "
-
+    "found {id_name} - {id_type}.",
     "1-1-2-3": "Having component output type must be boolean, found {type}.",
     # "1-1-2-4": "At op {op}: Component {id_name} not found in dataset",
     # Analytic errors
     # TODO: Use error message 1-1-1-8
     # "1-1-3-1": "At op {op}: No measures found to analyse.",
     "1-1-3-2": "At op {op}: Only Identifiers are allowed for partitioning, "
-
+    "found {id_name} - {id_type}.",
     # Cast errors
     "1-1-5-1": "Type {type_1}, cannot be cast to {type_2}.",
     "1-1-5-3": "Impossible to cast from type {type_1} to {type_2}, without providing a mask.",
     "1-1-5-4": "Invalid mask to cast from type {type_1} to {type_2}.",
     "1-1-5-5": "A mask can't be provided to cast from type {type_1} to {type_2}. Mask provided: "
-
+    "{mask_value}.",
     "2-1-5-1": "Impossible to cast {value} from type {type_1} to {type_2}.",
     # Clause errors
     # "1-1-6-1": "At op {op}: Component {comp_name} not found in dataset {dataset_name}.",
     "1-1-6-2": "At op {op}: The identifier {name} in dataset {dataset} could not be included "
-
+    "in the {op} op.",
     # TODO: This is not possible at all, as calc clause adds a new column and
     # identifiers are still unique
     # "1-1-6-3": "Found duplicated values on identifiers after Calc clause.",
     "1-1-6-4": "At op {op}: Alias symbol cannot have the name of a component symbol: "
-
+    "{symbol_name} - {comp_name}.",
     "1-1-6-5": "At op {op}: Scalar values are not allowed at sub operator, found {name}.",
     "1-1-6-6": "Membership is not allowed inside a clause, found {dataset_name}#{comp_name}.",
     "1-1-6-7": "Cannot use component {comp_name} as it was generated in another calc expression.",
     # all the components used in calccomp must belong to the operand dataset
     "1-1-6-8": "Cannot use component {comp_name} for rename, it is already in the dataset "
-
+    "{dataset_name}.",
     # it is the same error that 1-1-8-1 AND similar but not the same 1-3-1
     "1-1-6-9": "At op {op}: The following components are repeated: {from_components}.",
     "1-1-6-10": "At op {op}: Component {operand} in dataset {dataset_name} is not an identifier",
@@ -115,39 +116,46 @@
     # "1-1-6-15": "At op {op}: Component {comp_name} already exists in dataset {dataset_name}",
     # Comparison errors
     "1-1-7-1": "At op {op}: Value in {left_name} of type {left_type} is not comparable to value "
-
+    "{right_name} of type {right_type}.",
     # Conditional errors
     "1-1-9-1": "At op {op}: The evaluation condition must result in a Boolean "
-
+    "expression, found '{type}'.",
     "1-1-9-3": "At op {op}: Then clause {then_name} and else clause {else_name}, both must be "
-
+    "Scalars.",
     "1-1-9-4": "At op {op}: The condition dataset {name} must contain an unique measure.",
     "1-1-9-5": "At op {op}: The condition dataset Measure must be a Boolean, found '{type}'.",
     "1-1-9-6": "At op {op}: Then-else datasets have different number of identifiers compared "
-
+    "with condition dataset.",
     "1-1-9-9": "At op {op}: {clause} component {clause_name} role must be {role_1}, found "
-
+    "{role_2}.",
     "1-1-9-10": "At op {op}: {clause} dataset have different number of identifiers compared with "
-
+    "condition dataset.",
     "1-1-9-11": "At op {op}: Condition component {name} must be Boolean, found {type}.",
     "1-1-9-12": "At op {op}: then clause {then_symbol} and else clause {else_symbol}, both must "
-
+    "be Datasets or at least one of them a Scalar.",
     "1-1-9-13": "At op {op}: then {then} and else {else_clause} datasets must contain the same "
-
+    "number of components.",
+    "2-1-9-1": "At op {op}: Condition operators must have the same operator type.",
+    "2-1-9-2": "At op {op}: Condition {name} it's not a boolean.",
+    "2-1-9-3": "At op {op}: All then and else operands must be scalars.",
+    "2-1-9-4": "At op {op}: Condition {name} must be boolean type.",
+    "2-1-9-5": "At op {op}: Condition Dataset {name} measure must be Boolean.",
+    "2-1-9-6": "At op {op}: At least a then or else operand must be Dataset.",
+    "2-1-9-7": "At op {op}: All Dataset operands must have the same components.",
     # Data Validation errors
     "1-1-10-1": "At op {op}: The {op_type} operand must have exactly one measure of type {me_type}",
     "1-1-10-2": "At op {op}: Number of variable has to be equal between the call and signature.",
     "1-1-10-3": "At op {op}: Name in the call {found} has to be equal to variable rule in "
-
+    "signature {expected}.",
     "1-1-10-4": "At op {op}: When a hierarchical ruleset is defined for value domain, it is "
-
+    "necessary to specify the component with the rule clause on call.",
     "1-1-10-5": "No rules to analyze on Hierarchy Roll-up as rules have no = operator.",
     "1-1-10-6": "At op {op}: Name in the call {found} has to be equal to variable condition in "
-
+    "signature {expected} .",
     "1-1-10-7": "Not found component {comp_name} on signature.",
     "1-1-10-8": "At op {op}: Measures involved have to be numerical, other types found {found}.",
     "1-1-10-9": "Invalid signature for the ruleset {ruleset}. On variables, condComp and "
-
+    "ruleComp must be the same",
     # General Operators
     # "1-1-12-1": "At op {op}: You could not recalculate the identifier {name} on dataset "
     # "{dataset}.",
@@ -157,49 +165,49 @@
     "1-1-13-1": "At op {op}: Duplicated alias {duplicates}.",
     "1-1-13-2": "At op {op}: Missing mandatory aliasing.",
     "1-1-13-3": "At op {op}: Join conflict with duplicated names for column {name} from original "
-
+    "datasets.",
     "1-1-13-4": "At op {op}: Using clause, using={using_names}, does not define all the "
-
+    "identifiers, of non reference dataset {dataset}.",
     "1-1-13-5": "At op {op}: Invalid subcase B1, All the datasets must share as identifiers the "
-
+    "using ones.",
     # not in use but we keep for later, in use 1-1-13-4
     "1-1-13-6": "At op {op}: Invalid subcase B2, All the declared using components "
-
-
+    "'{using_components}' must be present as components in the reference dataset "
+    "'{reference}'.",
     "1-1-13-7": "At op {op}: Invalid subcase B2, All the non reference datasets must share as "
-
+    "identifiers the using ones.",
     "1-1-13-8": "At op {op}: No available using clause.",
     "1-1-13-9": "Ambiguity for this variable {comp_name} inside a join clause.",
     "1-1-13-10": "The join operator does not perform scalar/component operations.",
     "1-1-13-11": "At op {op}: Invalid subcase A, {dataset_reference} should be a superset but "
-
+    "{component} not found.",
     # inner_join and left join
     "1-1-13-12": "At op {op}: Invalid subcase A. There are different identifiers for the provided "
-
+    "datasets",
     # full_join
     "1-1-13-13": "At op {op}: Invalid subcase A. There are not same number of identifiers for the "
-
+    "provided datasets",
     # full_join
     "1-1-13-14": "Cannot perform a join over a Dataset Without Identifiers: {name}.",
     "1-1-13-15": "At op {op}: {comp_name} has to be a Measure for all the provided datasets inside "
-
+    "the join",
     "1-1-13-16": "At op {op}: Invalid use, please review : {msg}.",
     "1-1-13-17": "At op {op}: {comp_name} not present in the dataset(result from join VDS) at the "
-
+    "time it is called",
     # Operators general errors
     "1-1-14-1": "At op {op}: Measure names don't match: {left} - {right}.",
     "1-1-14-3": "At op {op}: Invalid scalar types for identifiers at DataSet {dataset}. One {type} "
-
+    "identifier expected, {count} found.",
     "1-1-14-5": "At op {op}: {names} with type/s {types} is not compatible with {op}",
     "1-1-14-6": "At op {op}: {comp_name} with type {comp_type} and scalar_set with type "
-
+    "{scalar_type} is not compatible with {op}",
     # "1-1-14-8": "At op {op}: Operation not allowed for multimeasure datasets.",
     "1-1-14-9": "At op {op}: {names} with type/s {types} is not compatible with {op} on datasets "
-
+    "{datasets}.",
     # Numeric Operators
     "1-1-15-8": "At op {op}: {op} operator cannot have a {comp_type} as parameter.",
     "2-1-15-1": "At op {op}: Component {comp_name} from dataset {dataset_name} contains negative "
-
+    "values.",
     "2-1-15-2": "At op {op}: Value {value} could not be negative.",
     "2-1-15-3": "At op {op}: Base value {value} could not be less or equal 0.",
     "2-1-15-4": "At op {op}: Invalid values in Component {name}.",
@@ -208,7 +216,7 @@
     "2-1-15-7": "At op {op}: {op} operator cannot be a dataset.",
     # Set Operators
     "1-1-17-1": "At op {op}: Datasets {dataset_1} and {dataset_2} have different number of "
-
+    "components",
     # String Operators
     # "1-1-18-1": "At op {op}: Invalid Dataset {name}. Dataset with one measure expected.",
     "1-1-18-2": "At op {op}: Composition of DataSet and Component is not allowed.",
@@ -222,29 +230,41 @@
     "1-1-19-2": "At op {op}: Unknown date type for {op}.",
     "1-1-19-3": "At op {op}: Invalid {param} for {op}.",
     "1-1-19-4": "At op {op}: Invalid values {value_1} and {value_2}, periodIndTo parameter must be "
-
+    "a larger duration value than periodIndFrom parameter.",
     "1-1-19-5": "At op {op}: periodIndTo parameter must be a larger duration value than the values "
-
+    "to aggregate.",
     "1-1-19-6": "At op {op}: Time type used in the component {comp} is not supported.",
     "1-1-19-7": "At op {op}: can be applied only on Data Sets (of time series) and returns a Data "
-
+    "Set (of time series).",
     # flow_to_stock, stock_to_flow
     "1-1-19-8": "At op {op}: {op} can only be applied to a {comp_type}",
     "1-1-19-9": "At op {op}: {op} can only be applied to a {comp_type} with a {param}",
+    # New Unary time operators
+    "1-1-19-10": "{op} can only be applied to operands with data type as Date or Time Period",
     # Other time operators
     "2-1-19-1": "At op {op}: Invalid values {value_1} and {value_2} for duration, "
-
-
+    "periodIndTo parameter must be a larger duration value than the "
+    "values to aggregate.",
     "2-1-19-2": "Invalid period indicator {period}.",
     "2-1-19-3": "Only same period indicator allowed for both parameters ({period1} != {period2}).",
     "2-1-19-4": "Date setter, ({value} > {date}). Cannot set date1 with a value higher than date2.",
     "2-1-19-5": "Date setter, ({value} < {date}). Cannot set date2 with a value lower than date1.",
     "2-1-19-6": "Invalid period format, must be YYYY-(L)NNN: {period_format}",
     "2-1-19-7": "Period Number must be between 1 and {periods} for period indicator "
-
-    "2-1-19-8": "Invalid date format, must be YYYY-MM-DD: {
+    "{period_indicator}.",
+    "2-1-19-8": "Invalid date format, must be YYYY-MM-DD: {date}",
     "2-1-19-9": "Invalid day {day} for year {year}.",
     "2-1-19-10": "Invalid year {year}, must be between 1900 and 9999.",
+    "2-1-19-11": "{op} operator is not compatible with time values",
+    "2-1-19-12": "At op {op}: Invalid param type {type} for param {name}, "
+    "expected {expected}.",
+    "2-1-19-13": "At op {op}: Invalid param data_type {type} for param {name}, "
+    "expected {expected}.",
+    "2-1-19-14": "At op {op}: Invalid dataset {name}, requires at least one Date/Time_Period "
+    "measure.",
+    "2-1-19-15": "{op} can only be applied according to the following mask: PY/YDDD/D",
+    "2-1-19-16": "{op} can only be applied according to the following mask: PM/MDD/D",
+    "2-1-19-17": "{op} can only be positive numbers",
     # ----------- Interpreter Common ------
     "2-3-1": "{comp_type} {comp_name} not found.",
     "2-3-2": "{op_type} cannot be used with {node_op} operators.",
@@ -273,13 +293,13 @@
     "1-3-22": "Unable to categorize {node_value}.",
     "1-3-23": "Missing value domain '{name}' definition, please provide an structure.",
     "1-3-24": "Internal error on Analytic operators inside a calc, No partition or "
-
+    "order symbol found.",
     "1-3-26": "Value domain {name} not found.",
     "1-3-27": "Dataset without identifiers are not allowed in {op} operator.",
     "1-3-28": "At op {op}: invalid number of parameters: received {received}, expected at "
-
+    "least: {expected}",
     "1-3-29": "At op {op}: can not use user defined operator that returns a component outside "
-
+    "clause operator or rule",
     "1-3-30": "At op {op}: too many parameters: received {received}, expected: {expected}",
     "1-3-31": "Cannot use component {name} outside an aggregate function in a having clause.",
     "1-3-32": "Cannot perform operation {op} inside having clause.",
@@ -293,23 +313,23 @@
     "1-4-1-3": "At op {op}: using variable {value}, not defined as an argument.",
     "1-4-1-4": "Found duplicates at arguments naming, please review {type} " "definition {op}.",
     "1-4-1-5": "Found duplicates at rule naming: {names}. Please review {type} "
-
+    "{ruleset_name} definition.",
     "1-4-1-6": "At op {op}: Arguments incoherence, {defined} defined {passed} passed.",
     "1-4-1-7": "All rules must be named or not named, but found mixed criteria at {type} "
-
+    "definition {name}.",
     "1-4-1-8": "All rules must have different code items in the left side of '=' in hierarchy "
-
+    "operator at hierachical ruleset definition {name}.",
     "1-4-1-9": "At op check_datapoint: {name} has an invalid datatype expected DataSet, found "
-
+    "Scalar.",
     # AST Creation
     "1-4-2-1": "Eval could not be called without a {option} type definition.",
     "1-4-2-2": "Optional or empty expression node is not allowed in time_agg.",
     "1-4-2-3": "{value} could not be called in the count.",
     "1-4-2-4": "At op {op}: Only one order_by element must be used in Analytic with range "
-
+    "windowing.",
     "1-4-2-5": "At op {op}: User defined operator without returns is not implemented.",
     "1-4-2-6": "At op {op}: Window must be provided.",
     "1-4-2-7": "At op {op}: Partition by or order by clause must be provided for Analytic "
-
+    "operators.",
     # Not Implemented Error
 }
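The multi-line entries rely on Python's implicit concatenation of adjacent string literals, which is why each restored continuation line above is itself a quoted string; every entry is a plain str.format template. A small sketch with the new 0-1-1-13 entry:

# "0-1-1-13" as added in this release:
template = "Invalid key on {field} field: {key}{closest_key}."

# check_key passes the suggestion pre-formatted, or "" when nothing is close:
print(template.format(field="input", key="datsets",
                      closest_key=". Did you mean datasets?"))
# -> Invalid key on input field: datsets. Did you mean datasets?.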