vtlengine-1.0.2-py3-none-any.whl → vtlengine-1.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of vtlengine has been flagged as a potentially problematic release.
Files changed (46)
  1. vtlengine/API/_InternalApi.py +12 -5
  2. vtlengine/API/__init__.py +8 -8
  3. vtlengine/AST/ASTConstructor.py +23 -43
  4. vtlengine/AST/ASTConstructorModules/Expr.py +69 -84
  5. vtlengine/AST/ASTConstructorModules/ExprComponents.py +47 -57
  6. vtlengine/AST/ASTConstructorModules/Terminals.py +28 -39
  7. vtlengine/AST/ASTTemplate.py +0 -1
  8. vtlengine/AST/DAG/__init__.py +12 -15
  9. vtlengine/AST/Grammar/tokens.py +2 -2
  10. vtlengine/AST/VtlVisitor.py +0 -1
  11. vtlengine/AST/__init__.py +2 -3
  12. vtlengine/DataTypes/TimeHandling.py +10 -7
  13. vtlengine/DataTypes/__init__.py +17 -24
  14. vtlengine/Exceptions/__init__.py +3 -5
  15. vtlengine/Exceptions/messages.py +68 -56
  16. vtlengine/Interpreter/__init__.py +82 -103
  17. vtlengine/Model/__init__.py +10 -12
  18. vtlengine/Operators/Aggregation.py +14 -14
  19. vtlengine/Operators/Analytic.py +3 -10
  20. vtlengine/Operators/Assignment.py +2 -3
  21. vtlengine/Operators/Boolean.py +5 -7
  22. vtlengine/Operators/CastOperator.py +12 -13
  23. vtlengine/Operators/Clause.py +11 -13
  24. vtlengine/Operators/Comparison.py +31 -17
  25. vtlengine/Operators/Conditional.py +48 -49
  26. vtlengine/Operators/General.py +4 -4
  27. vtlengine/Operators/HROperators.py +41 -34
  28. vtlengine/Operators/Join.py +18 -22
  29. vtlengine/Operators/Numeric.py +44 -45
  30. vtlengine/Operators/RoleSetter.py +6 -8
  31. vtlengine/Operators/Set.py +7 -12
  32. vtlengine/Operators/String.py +19 -27
  33. vtlengine/Operators/Time.py +298 -109
  34. vtlengine/Operators/Validation.py +4 -7
  35. vtlengine/Operators/__init__.py +38 -41
  36. vtlengine/Utils/__init__.py +133 -114
  37. vtlengine/__init__.py +1 -1
  38. vtlengine/files/output/__init__.py +2 -2
  39. vtlengine/files/output/_time_period_representation.py +0 -1
  40. vtlengine/files/parser/__init__.py +16 -18
  41. vtlengine/files/parser/_time_checking.py +1 -2
  42. {vtlengine-1.0.2.dist-info → vtlengine-1.0.3.dist-info}/METADATA +5 -2
  43. vtlengine-1.0.3.dist-info/RECORD +58 -0
  44. vtlengine-1.0.2.dist-info/RECORD +0 -58
  45. {vtlengine-1.0.2.dist-info → vtlengine-1.0.3.dist-info}/LICENSE.md +0 -0
  46. {vtlengine-1.0.2.dist-info → vtlengine-1.0.3.dist-info}/WHEEL +0 -0
vtlengine/Operators/__init__.py CHANGED
@@ -1,31 +1,29 @@
 import os
 from copy import copy
-from typing import Any, Union, Optional
+from typing import Any, Optional, Union
 
+# if os.environ.get("SPARK", False):
+#     import pyspark.pandas as pd
+# else:
+#     import pandas as pd
+import pandas as pd
+
+from vtlengine.AST.Grammar.tokens import AND, CEIL, EQ, FLOOR, GT, GTE, LT, LTE, NEQ, OR, ROUND, XOR
 from vtlengine.DataTypes import (
     COMP_NAME_MAPPING,
+    SCALAR_TYPES_CLASS_REVERSE,
     binary_implicit_promotion,
     check_binary_implicit_promotion,
     check_unary_implicit_promotion,
     unary_implicit_promotion,
-    SCALAR_TYPES_CLASS_REVERSE,
 )
 from vtlengine.DataTypes.TimeHandling import (
+    DURATION_MAPPING,
     TimeIntervalHandler,
     TimePeriodHandler,
-    DURATION_MAPPING,
 )
-
-from vtlengine.AST.Grammar.tokens import CEIL, FLOOR, ROUND, EQ, NEQ, GT, GTE, LT, LTE, XOR, OR, AND
 from vtlengine.Exceptions import SemanticError
-
-# if os.environ.get("SPARK", False):
-#     import pyspark.pandas as pd
-# else:
-#     import pandas as pd
-import pandas as pd
-
-from vtlengine.Model import Component, Dataset, Role, Scalar, DataComponent, ScalarSet
+from vtlengine.Model import Component, DataComponent, Dataset, Role, Scalar, ScalarSet
 
 ALL_MODEL_DATA_TYPES = Union[Dataset, Scalar, DataComponent]
 
@@ -167,13 +165,12 @@ class Operator:
 
 
 def _id_type_promotion_join_keys(
-    c_left: Component,
-    c_right: Component,
-    join_key: str,
-    left_data: Optional[pd.DataFrame] = None,
-    right_data: Optional[pd.DataFrame] = None,
+    c_left: Component,
+    c_right: Component,
+    join_key: str,
+    left_data: Optional[pd.DataFrame] = None,
+    right_data: Optional[pd.DataFrame] = None,
 ) -> None:
-
     if left_data is None:
         left_data = pd.DataFrame()
     if right_data is None:
@@ -187,7 +184,7 @@ def _id_type_promotion_join_keys(
         right_data[join_key] = right_data[join_key].astype(object)
         return
     if (left_type_name == "Integer" and right_type_name == "Number") or (
-        left_type_name == "Number" and right_type_name == "Integer"
+        left_type_name == "Number" and right_type_name == "Integer"
     ):
         left_data[join_key] = left_data[join_key].map(lambda x: int(float(x)))
         right_data[join_key] = right_data[join_key].map(lambda x: int(float(x)))
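The promotion above aligns join-key dtypes before merging: String keys are cast to object, and an Integer/Number pair is coerced to int on both sides so equal values actually match. A toy illustration of that coercion with hypothetical data (not taken from the package):

import pandas as pd

left = pd.DataFrame({"Id_1": [1, 2, 3]})          # Integer-typed identifier
right = pd.DataFrame({"Id_1": [1.0, 2.0, 3.0]})   # Number-typed identifier

# Same coercion as in the hunk above, applied to both sides of the join key,
# so 1 and 1.0 end up as the same int value before the merge.
left["Id_1"] = left["Id_1"].map(lambda x: int(float(x)))
right["Id_1"] = right["Id_1"].map(lambda x: int(float(x)))

print(pd.merge(left, right, on="Id_1"))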
@@ -200,7 +197,6 @@
 
 
 def _handle_str_number(x: Union[str, int, float]) -> Union[str, int, float]:
-
     if isinstance(x, int):
         return x
     try:
@@ -238,10 +234,10 @@ class Binary(Operator):
 
     @classmethod
     def apply_operation_series_scalar(
-        cls,
-        series: Any,
-        scalar: Scalar,
-        series_left: bool,
+        cls,
+        series: Any,
+        scalar: Scalar,
+        series_left: bool,
     ) -> Any:
 
         if scalar is None:
@@ -353,7 +349,8 @@ class Binary(Operator):
 
         if not cls.validate_type_compatibility(left_operand.data_type, right_operand.data_type):
             raise SemanticError(
-                "1-1-1-2",
+                "1-1-1-2", type_1=left_operand.data_type, type_2=right_operand.data_type,
+                type_check=cls.type_to_check
             )
         return Scalar(
             name="result",
@@ -363,7 +360,7 @@ class Binary(Operator):
 
     @classmethod
     def component_validation(
-        cls, left_operand: DataComponent, right_operand: DataComponent
+        cls, left_operand: DataComponent, right_operand: DataComponent
     ) -> DataComponent:
         """
         Validates the compatibility between the types of the components and the operator
@@ -415,7 +412,7 @@ class Binary(Operator):
 
     @classmethod
     def component_set_validation(
-        cls, component: DataComponent, scalar_set: ScalarSet
+        cls, component: DataComponent, scalar_set: ScalarSet
     ) -> DataComponent:
 
         cls.type_validation(component.data_type, scalar_set.data_type)
@@ -471,7 +468,7 @@ class Binary(Operator):
 
     @classmethod
     def apply_return_type_dataset(
-        cls, result_dataset: Dataset, left_operand: Any, right_operand: Any
+        cls, result_dataset: Dataset, left_operand: Any, right_operand: Any
     ) -> None:
         """
         Used in dataset's validation.
@@ -501,9 +498,9 @@ class Binary(Operator):
         if result_dataset.data is not None:
             result_dataset.data.rename(columns={measure.name: component.name}, inplace=True)
         elif (
-            changed_allowed is False
-            and is_mono_measure is False
-            and left_type.promotion_changed_type(result_data_type)
+            changed_allowed is False
+            and is_mono_measure is False
+            and left_type.promotion_changed_type(result_data_type)
         ):
             raise SemanticError("1-1-1-4", op=cls.op)
         else:
@@ -597,7 +594,7 @@ class Binary(Operator):
 
     @classmethod
     def dataset_scalar_evaluation(
-        cls, dataset: Dataset, scalar: Scalar, dataset_left: bool = True
+        cls, dataset: Dataset, scalar: Scalar, dataset_left: bool = True
     ) -> Dataset:
 
         result_dataset = cls.dataset_scalar_validation(dataset, scalar)
@@ -609,7 +606,7 @@ class Binary(Operator):
         for measure in dataset.get_measures():
             measure_data = cls.cast_time_types(measure.data_type, result_data[measure.name].copy())
             if measure.data_type.__name__.__str__() == "Duration" and not isinstance(
-                scalar_value, int
+                scalar_value, int
             ):
                 scalar_value = DURATION_MAPPING[scalar_value]
             result_dataset.data[measure.name] = cls.apply_operation_series_scalar(
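dataset_scalar_evaluation above (and component_scalar_evaluation further down) translate a non-integer Duration scalar through DURATION_MAPPING before the series/scalar operation, so duration letter codes become comparable ordinals. A minimal sketch of the idea with a made-up mapping (the real DURATION_MAPPING lives in vtlengine.DataTypes.TimeHandling and its values may differ):

# Hypothetical ordinal mapping for duration codes; vtlengine's actual
# DURATION_MAPPING in DataTypes/TimeHandling.py may use different values.
duration_order = {"D": 1, "W": 2, "M": 3, "Q": 4, "S": 5, "A": 6}

scalar_value = "M"
if not isinstance(scalar_value, int):
    scalar_value = duration_order[scalar_value]

print(scalar_value)  # 3, now comparable against other mapped duration codes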
@@ -624,7 +621,7 @@ class Binary(Operator):
 
     @classmethod
     def component_evaluation(
-        cls, left_operand: DataComponent, right_operand: DataComponent
+        cls, left_operand: DataComponent, right_operand: DataComponent
     ) -> DataComponent:
 
         result_component = cls.component_validation(left_operand, right_operand)
@@ -641,7 +638,7 @@ class Binary(Operator):
 
     @classmethod
     def component_scalar_evaluation(
-        cls, component: DataComponent, scalar: Scalar, component_left: bool = True
+        cls, component: DataComponent, scalar: Scalar, component_left: bool = True
     ) -> DataComponent:
 
         result_component = cls.component_scalar_validation(component, scalar)
@@ -651,7 +648,7 @@ class Binary(Operator):
         )
         scalar_value = cls.cast_time_types_scalar(scalar.data_type, scalar.value)
         if component.data_type.__name__.__str__() == "Duration" and not isinstance(
-            scalar_value, int
+            scalar_value, int
         ):
             scalar_value = DURATION_MAPPING[scalar_value]
         result_component.data = cls.apply_operation_series_scalar(
@@ -679,7 +676,7 @@ class Binary(Operator):
 
     @classmethod
     def component_set_evaluation(
-        cls, component: DataComponent, scalar_set: ScalarSet
+        cls, component: DataComponent, scalar_set: ScalarSet
     ) -> DataComponent:
 
         result_component = cls.component_set_validation(component, scalar_set)
@@ -853,9 +850,9 @@ class Unary(Operator):
         if result_dataset.data is not None:
             result_dataset.data.rename(columns={measure.name: component.name}, inplace=True)
         elif (
-            changed_allowed is False
-            and is_mono_measure is False
-            and operand_type.promotion_changed_type(result_data_type)
+            changed_allowed is False
+            and is_mono_measure is False
+            and operand_type.promotion_changed_type(result_data_type)
         ):
             raise SemanticError("1-1-1-4", op=cls.op)
         else:
vtlengine/Utils/__init__.py CHANGED
@@ -1,5 +1,101 @@
 from typing import Any, Dict
 
+from vtlengine.AST.Grammar.tokens import (
+    ABS,
+    AGGREGATE,
+    AND,
+    APPLY,
+    AS,
+    ATTRIBUTE,
+    AVG,
+    CALC,
+    CEIL,
+    CHARSET_MATCH,
+    CONCAT,
+    COUNT,
+    CROSS_JOIN,
+    DATE_ADD,
+    DATEDIFF,
+    DAYOFMONTH,
+    DAYOFYEAR,
+    DAYTOMONTH,
+    DAYTOYEAR,
+    DIV,
+    DROP,
+    EQ,
+    EXP,
+    FILL_TIME_SERIES,
+    FILTER,
+    FIRST_VALUE,
+    FLOOR,
+    FLOW_TO_STOCK,
+    FULL_JOIN,
+    GT,
+    GTE,
+    IDENTIFIER,
+    IN,
+    INNER_JOIN,
+    INTERSECT,
+    ISNULL,
+    KEEP,
+    LAG,
+    LAST_VALUE,
+    LCASE,
+    LEAD,
+    LEFT_JOIN,
+    LEN,
+    LN,
+    LOG,
+    LT,
+    LTE,
+    LTRIM,
+    MAX,
+    MEASURE,
+    MEDIAN,
+    MEMBERSHIP,
+    MIN,
+    MINUS,
+    MOD,
+    MONTH,
+    MONTHTODAY,
+    MULT,
+    NEQ,
+    NOT,
+    NOT_IN,
+    NVL,
+    OR,
+    PERIOD_INDICATOR,
+    PIVOT,
+    PLUS,
+    POWER,
+    RANDOM,
+    RANK,
+    RATIO_TO_REPORT,
+    RENAME,
+    REPLACE,
+    ROUND,
+    RTRIM,
+    SETDIFF,
+    SQRT,
+    STDDEV_POP,
+    STDDEV_SAMP,
+    STOCK_TO_FLOW,
+    SUBSPACE,
+    SUBSTR,
+    SUM,
+    SYMDIFF,
+    TIMESHIFT,
+    TRIM,
+    TRUNC,
+    UCASE,
+    UNION,
+    UNPIVOT,
+    VAR_POP,
+    VAR_SAMP,
+    XOR,
+    YEAR,
+    YEARTODAY,
+)
 from vtlengine.Operators.Aggregation import (
     Avg,
     Count,
@@ -14,20 +110,40 @@ from vtlengine.Operators.Aggregation import (
 )
 from vtlengine.Operators.Analytic import (
     Avg as AvgAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     Count as CountAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     FirstValue,
     Lag,
     LastValue,
     Lead,
+    Rank,
+    RatioToReport,
+)
+from vtlengine.Operators.Analytic import (
     Max as MaxAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     Median as MedianAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     Min as MinAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     PopulationStandardDeviation as PopulationStandardDeviationAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     PopulationVariance as PopulationVarianceAnalytic,
-    Rank,
-    RatioToReport,
+)
+from vtlengine.Operators.Analytic import (
     SampleStandardDeviation as SampleStandardDeviationAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     SampleVariance as SampleVarianceAnalytic,
+)
+from vtlengine.Operators.Analytic import (
     Sum as SumAnalytic,
 )
 from vtlengine.Operators.Boolean import And, Not, Or, Xor
@@ -50,22 +166,22 @@ from vtlengine.Operators.Comparison import (
     IsNull,
     Less,
     LessEqual,
+    Match,
     NotEqual,
     NotIn,
-    Match,
 )
 from vtlengine.Operators.Conditional import Nvl
 from vtlengine.Operators.General import Alias, Membership
 from vtlengine.Operators.HROperators import (
+    HRBinMinus,
+    HRBinPlus,
     HREqual,
     HRGreater,
     HRGreaterEqual,
     HRLess,
     HRLessEqual,
-    HRBinPlus,
-    HRBinMinus,
-    HRUnPlus,
     HRUnMinus,
+    HRUnPlus,
 )
 from vtlengine.Operators.Join import Apply, CrossJoin, FullJoin, InnerJoin, LeftJoin
 from vtlengine.Operators.Numeric import (
@@ -81,12 +197,12 @@ from vtlengine.Operators.Numeric import (
     Mult,
     NaturalLogarithm,
     Power,
+    Random,
     Round,
     SquareRoot,
     Trunc,
     UnMinus,
     UnPlus,
-    Random,
 )
 from vtlengine.Operators.RoleSetter import Attribute, Identifier, Measure
 from vtlengine.Operators.Set import Intersection, Setdiff, Symdiff, Union
@@ -102,118 +218,21 @@ from vtlengine.Operators.String import (
     Upper,
 )
 from vtlengine.Operators.Time import (
+    Date_Add,
+    Date_Diff,
+    Day_of_Month,
+    Day_of_Year,
+    Day_to_Month,
+    Day_to_Year,
+    Fill_time_series,
     Flow_to_stock,
+    Month,
+    Month_to_Day,
     Period_indicator,
     Stock_to_flow,
-    Fill_time_series,
     Time_Shift,
     Year,
-    Month,
-    Day_of_Month,
-    Day_of_Year,
-    Day_to_Year,
-    Day_to_Month,
     Year_to_Day,
-    Month_to_Day,
-    Date_Diff,
-    Date_Add,
-)
-
-from vtlengine.AST.Grammar.tokens import (
-    MEMBERSHIP,
-    AND,
-    OR,
-    XOR,
-    EQ,
-    NEQ,
-    GT,
-    GTE,
-    LT,
-    LTE,
-    IN,
-    NOT_IN,
-    NVL,
-    PLUS,
-    MINUS,
-    MULT,
-    LOG,
-    MOD,
-    POWER,
-    DIV,
-    AS,
-    CONCAT,
-    TIMESHIFT,
-    CHARSET_MATCH,
-    NOT,
-    ABS,
-    EXP,
-    LN,
-    SQRT,
-    CEIL,
-    FLOOR,
-    ISNULL,
-    PERIOD_INDICATOR,
-    LEN,
-    LCASE,
-    LTRIM,
-    RTRIM,
-    TRIM,
-    UCASE,
-    FLOW_TO_STOCK,
-    STOCK_TO_FLOW,
-    ROUND,
-    TRUNC,
-    SUBSTR,
-    REPLACE,
-    FILL_TIME_SERIES,
-    IDENTIFIER,
-    ATTRIBUTE,
-    MEASURE,
-    CALC,
-    FILTER,
-    KEEP,
-    DROP,
-    RENAME,
-    PIVOT,
-    UNPIVOT,
-    SUBSPACE,
-    AGGREGATE,
-    APPLY,
-    UNION,
-    INTERSECT,
-    SYMDIFF,
-    SETDIFF,
-    MAX,
-    MIN,
-    SUM,
-    COUNT,
-    AVG,
-    MEDIAN,
-    STDDEV_POP,
-    STDDEV_SAMP,
-    VAR_POP,
-    VAR_SAMP,
-    LAG,
-    LEAD,
-    FIRST_VALUE,
-    LAST_VALUE,
-    RATIO_TO_REPORT,
-    RANK,
-    INNER_JOIN,
-    LEFT_JOIN,
-    FULL_JOIN,
-    CROSS_JOIN,
-    RANDOM,
-    DAYOFYEAR,
-    DAYOFMONTH,
-    MONTH,
-    YEAR,
-    DAYTOYEAR,
-    DAYTOMONTH,
-    YEARTODAY,
-    MONTHTODAY,
-    DATE_DIFF,
-    DATE_ADD,
 )
 
 BINARY_MAPPING: Dict[Any, Any] = {
@@ -250,7 +269,7 @@ BINARY_MAPPING: Dict[Any, Any] = {
     # Time
     TIMESHIFT: Time_Shift,
     CHARSET_MATCH: Match,
-    DATE_DIFF: Date_Diff,
+    DATEDIFF: Date_Diff,
 }
 
 UNARY_MAPPING = {
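BINARY_MAPPING and UNARY_MAPPING let the interpreter dispatch from a grammar token to the operator class that implements it; the functional change in 1.0.3 is simply keying date difference on the DATEDIFF token instead of DATE_DIFF. A stripped-down, self-contained sketch of that dispatch pattern (hypothetical tokens and operator classes, not the vtlengine ones):

# Hypothetical token constants and operator classes, illustrating only the
# token -> operator-class dispatch that BINARY_MAPPING supports.
PLUS, MINUS = "+", "-"

class BinPlus:
    @classmethod
    def evaluate(cls, left, right):
        return left + right

class BinMinus:
    @classmethod
    def evaluate(cls, left, right):
        return left - right

BINARY_OPS = {PLUS: BinPlus, MINUS: BinMinus}

def evaluate_binary(token, left, right):
    # Look up the operator class for the token and delegate to it.
    return BINARY_OPS[token].evaluate(left, right)

print(evaluate_binary(PLUS, 2, 3))  # 5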
vtlengine/__init__.py CHANGED
@@ -1,3 +1,3 @@
-from vtlengine.API import semantic_analysis, run
+from vtlengine.API import run, semantic_analysis
 
 __all__ = ["semantic_analysis", "run"]
vtlengine/files/output/__init__.py CHANGED
@@ -3,11 +3,11 @@ from typing import Optional, Union
 
 import pandas as pd
 
-from vtlengine.Model import Dataset
 from vtlengine.files.output._time_period_representation import (
-    format_time_period_external_representation,
     TimePeriodRepresentation,
+    format_time_period_external_representation,
 )
+from vtlengine.Model import Dataset
 
 
 def save_datapoints(
vtlengine/files/output/_time_period_representation.py CHANGED
@@ -2,7 +2,6 @@ from enum import Enum
 
 from vtlengine.DataTypes import TimePeriod
 from vtlengine.DataTypes.TimeHandling import TimePeriodHandler
-
 from vtlengine.Model import Dataset, Scalar
 
 
vtlengine/files/parser/__init__.py CHANGED
@@ -1,28 +1,27 @@
 import warnings
 from csv import DictReader
 from pathlib import Path
-
-from typing import Optional, Dict, Union, Any, Type, List
+from typing import Any, Dict, List, Optional, Type, Union
 
 import numpy as np
 import pandas as pd
+
 from vtlengine.DataTypes import (
+    SCALAR_TYPES_CLASS_REVERSE,
+    Boolean,
     Date,
-    TimePeriod,
-    TimeInterval,
+    Duration,
     Integer,
     Number,
-    Boolean,
-    Duration,
-    SCALAR_TYPES_CLASS_REVERSE,
     ScalarType,
+    TimeInterval,
+    TimePeriod,
 )
 from vtlengine.DataTypes.TimeHandling import DURATION_MAPPING
-from vtlengine.files.parser._rfc_dialect import register_rfc
-from vtlengine.files.parser._time_checking import check_date, check_time_period, check_time
-
 from vtlengine.Exceptions import InputValidationException, SemanticError
-from vtlengine.Model import Component, Role, Dataset
+from vtlengine.files.parser._rfc_dialect import register_rfc
+from vtlengine.files.parser._time_checking import check_date, check_time, check_time_period
+from vtlengine.Model import Component, Dataset, Role
 
 TIME_CHECKS_MAPPING: Dict[Type[ScalarType], Any] = {
     Date: check_date,
@@ -74,9 +73,9 @@ def _sanitize_pandas_columns(
     components: Dict[str, Component], csv_path: Union[str, Path], data: pd.DataFrame
 ) -> pd.DataFrame:
     # Fast loading from SDMX-CSV
-    if "DATAFLOW" in data.columns and data.columns[0] == "DATAFLOW":
-        if "DATAFLOW" not in components:
-            data.drop(columns=["DATAFLOW"], inplace=True)
+    if ("DATAFLOW" in data.columns and data.columns[0] == "DATAFLOW" and
+            "DATAFLOW" not in components):
+        data.drop(columns=["DATAFLOW"], inplace=True)
     if "STRUCTURE" in data.columns and data.columns[0] == "STRUCTURE":
         if "STRUCTURE" not in components:
             data.drop(columns=["STRUCTURE"], inplace=True)
@@ -135,9 +134,8 @@ def _pandas_load_s3_csv(components: Dict[str, Component], csv_path: str) -> pd.D
 def _parse_boolean(value: str) -> bool:
     if isinstance(value, bool):
         return value
-    if value.lower() == "true" or value == "1":
-        return True
-    return False
+    result = value.lower() == "true" or value == "1"
+    return result
 
 
 def _validate_pandas(
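The refactor collapses the if/return pair into a single boolean expression with identical behavior: only "true" (any casing) and "1" parse as True, and everything else is False. For example:

def parse_boolean(value):
    # Simplified copy of the parser shown above, for illustration only.
    if isinstance(value, bool):
        return value
    return value.lower() == "true" or value == "1"

assert parse_boolean("TRUE") is True
assert parse_boolean("1") is True
assert parse_boolean("0") is False
assert parse_boolean("yes") is False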
@@ -148,7 +146,7 @@ def _validate_pandas(
 
     id_names = [comp_name for comp_name, comp in components.items() if comp.role == Role.IDENTIFIER]
 
-    missing_columns = [name for name in components.keys() if name not in data.columns.tolist()]
+    missing_columns = [name for name in components if name not in data.columns.tolist()]
     if missing_columns:
         for name in missing_columns:
             if components[name].nullable is False:
vtlengine/files/parser/_time_checking.py CHANGED
@@ -1,9 +1,8 @@
 import calendar
 import re
-from datetime import datetime, date
+from datetime import date, datetime
 
 from vtlengine.DataTypes.TimeHandling import TimePeriodHandler
-
 from vtlengine.Exceptions import InputValidationException
 
 
{vtlengine-1.0.2.dist-info → vtlengine-1.0.3.dist-info}/METADATA CHANGED
@@ -1,8 +1,7 @@
 Metadata-Version: 2.1
 Name: vtlengine
-Version: 1.0.2
+Version: 1.0.3
 Summary: Run and Validate VTL Scripts
-Home-page: https://github.com/Meaningful-Data/vtlengine
 License: AGPL-3.0
 Keywords: vtl,sdmx,vtlengine,Validation and Transformation Language
 Author: MeaningfulData
@@ -27,6 +26,10 @@ Requires-Dist: numexpr (>=2.9.0,<3.0.0)
 Requires-Dist: pandas (>=2.1.4,<3.0.0)
 Requires-Dist: s3fs (>=2024.9.0,<2025.0.0)
 Requires-Dist: sqlglot (>=22.2.0,<23.0.0)
+Project-URL: Authors, https://github.com/Meaningful-Data/vtlengine/graphs/contributors
+Project-URL: Documentation, https://docs.vtlengine.meaningfuldata.eu
+Project-URL: IssueTracker, https://github.com/Meaningful-Data/vtlengine/issues
+Project-URL: MeaningfulData, https://www.meaningfuldata.eu/
 Project-URL: Repository, https://github.com/Meaningful-Data/vtlengine
 Description-Content-Type: text/markdown