vtlengine 1.2.2__py3-none-any.whl → 1.3.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vtlengine might be problematic.

@@ -255,6 +255,10 @@ class TimePeriodHandler:
         if isinstance(other, str):
             other = TimePeriodHandler(other)
 
+        if self.period_indicator != other.period_indicator:
+            tokens = {operator.lt: "<", operator.le: "<=", operator.gt: ">", operator.ge: ">="}
+            raise SemanticError("2-1-19-19", op=tokens[py_op], value1=self, value2=other)
+
         self_lapse, other_lapse = self.period_dates, other.period_dates
         is_lt_or_le = py_op in [operator.lt, operator.le]
         is_gt_or_ge = py_op in [operator.gt, operator.ge]
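
The guard added above rejects ordered comparisons between time periods whose period indicators differ (for example a quarter against a month) before any comparison of the underlying date lapses is attempted. A minimal standalone sketch of the same rule, using plain indicator strings and ValueError as stand-ins (in vtlengine the operands are TimePeriodHandler objects and the error raised is SemanticError("2-1-19-19", ...)):

import operator

def compare_periods(left_indicator: str, right_indicator: str, py_op) -> None:
    # Ordered comparison only makes sense when both operands use the same
    # period indicator (e.g. both quarters or both months).
    tokens = {operator.lt: "<", operator.le: "<=", operator.gt: ">", operator.ge: ">="}
    if left_indicator != right_indicator:
        raise ValueError(f"unsupported Time Period comparison: indicators differ ({tokens[py_op]})")

compare_periods("Q", "Q", operator.lt)      # same indicator: allowed
try:
    compare_periods("Q", "M", operator.lt)  # quarter vs month: rejected
except ValueError as err:
    print(err)
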
@@ -3,6 +3,11 @@ from typing import Any, Dict, Optional, Set, Type, Union
 
 import pandas as pd
 
+from vtlengine.DataTypes._time_checking import (
+    check_date,
+    check_time,
+    check_time_period,
+)
 from vtlengine.DataTypes.TimeHandling import (
     check_max_date,
     date_to_period_str,
@@ -102,6 +107,14 @@ class ScalarType:
         class_name: str = cls.__name__.__str__()
         return DTYPE_MAPPING[class_name]
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        try:
+            cls.cast(value)
+            return True
+        except Exception:
+            return False
+
 
 class String(ScalarType):
     """ """
@@ -143,6 +156,10 @@ class String(ScalarType):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        return True
+
 
 class Number(ScalarType):
     """ """
@@ -201,6 +218,19 @@ class Number(ScalarType):
             return 0.0
         return float(value)
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        if isinstance(value, (int, float, bool)):
+            return True
+        if isinstance(value, str):
+            v = value.strip()
+            if v.lower() in {"true", "false"}:
+                return True
+            return bool(re.match(r"^\d+(\.\d*)?$|^\.\d+$", v))
+        return False
+
 
 class Integer(Number):
     """ """
@@ -286,6 +316,16 @@ class Integer(Number):
             return 0
         return int(value)
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        if isinstance(value, str):
+            return value.isdigit() or value.lower() in {"true", "false"}
+        if isinstance(value, float):
+            return value.is_integer()
+        return isinstance(value, (int, bool))
+
 
 class TimeInterval(ScalarType):
     """ """
@@ -325,6 +365,16 @@ class TimeInterval(ScalarType):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        try:
+            check_time(value)
+        except Exception:
+            return False
+        return True
+
 
 class Date(TimeInterval):
     """ """
@@ -357,6 +407,16 @@ class Date(TimeInterval):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        try:
+            check_date(value)
+        except Exception:
+            return False
+        return True
+
 
 class TimePeriod(TimeInterval):
     """ """
@@ -400,6 +460,16 @@ class TimePeriod(TimeInterval):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        try:
+            check_time_period(value)
+        except Exception:
+            return False
+        return True
+
 
 class Duration(ScalarType):
     iso8601_duration_pattern = r"^P((\d+Y)?(\d+M)?(\d+D)?)$"
@@ -461,6 +531,16 @@ class Duration(ScalarType):
         total_days = years * 365 + months * 30 + days
         return int(total_days)
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+
+        if isinstance(value, str):
+            match = re.match(cls.iso8601_duration_pattern, value)
+            return bool(match)
+        return False
+
 
 class Boolean(ScalarType):
     """ """
@@ -514,6 +594,14 @@ class Boolean(ScalarType):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        if pd.isnull(value):
+            return True
+        if isinstance(value, str):
+            return value.lower() in {"true", "false", "1", "0"}
+        return isinstance(value, (int, float, bool))
+
 
 class Null(ScalarType):
     """ """
@@ -534,6 +622,10 @@ class Null(ScalarType):
     def dtype(cls) -> str:
         return "string"
 
+    @classmethod
+    def check(cls, value: Any) -> bool:
+        return True
+
 
 SCALAR_TYPES: Dict[str, Type[ScalarType]] = {
     "String": String,
@@ -11,7 +11,7 @@ def check_date(value: str) -> str:
     Check if the date is in the correct format.
     """
     # Remove all whitespaces
-    value = value.replace(" ", "")
+    value = value.strip()
     try:
         if len(value) == 9 and value[7] == "-":
             value = value[:-1] + "0" + value[-1]
@@ -49,7 +49,7 @@ time_pattern = r"^" + date_pattern + r"/" + date_pattern + r"$"
 
 
 def check_time(value: str) -> str:
-    value = value.replace(" ", "")
+    value = value.strip()
     year_result = re.fullmatch(year_pattern, value)
     if year_result is not None:
         date1_time = datetime.strptime(value, "%Y")
@@ -94,7 +94,7 @@ further_options_period_pattern = (
 def check_time_period(value: str) -> str:
     if isinstance(value, int):
         value = str(value)
-    value = value.replace(" ", "")
+    value = value.strip()
 
     match = re.fullmatch(r"^(\d{4})-(\d{2})$", value)
     if match:
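
All three checkers now trim only leading and trailing whitespace instead of deleting every space, so interior spaces are no longer silently repaired before the pattern match. A minimal before/after comparison of the two spellings:

raw = "  2022-01-01  "
print(raw.replace(" ", ""))     # old behaviour: "2022-01-01"
print(raw.strip())              # new behaviour: "2022-01-01" (same result here)

broken = "2022 - 01 - 01"
print(broken.replace(" ", ""))  # old behaviour: collapsed to "2022-01-01" and accepted
print(broken.strip())           # new behaviour: left as-is, so it no longer validates as a date
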
@@ -17,6 +17,8 @@ centralised_messages = {
     "0-1-2-3": "Component {component} is duplicated.",
     "0-1-2-4": "Invalid json structure because {err} on file {filename}.",
     "0-1-2-5": "File {file} must be encoded in utf-8 (without BOM).",
+    "0-1-2-6": "Not found scalar {name} in datastructures",
+    "0-1-2-7": "Invalid value '{value}' for type {type_} {op_type} {name}.",
     # Run SDMX errors
     "0-1-3-1": "Expected exactly one input dataset in the whole script, found: {number_datasets}",
     "0-1-3-2": "SDMX Dataset {schema} requires to have a Schema object defined as structure",
@@ -247,6 +249,10 @@ centralised_messages = {
     "= and <> comparison operations",
     "2-1-19-18": "At op {op}: Time operators do not support < and > comparison operations, "
    "so its not possible to use get the max or min between two time operators",
+    "2-1-19-19": "Time Period comparison (>, <, >=, <=) with different period indicator is not "
+    "supported, found {value1} {op} {value2}",
+    "2-1-19-20": "Time Period operands with different period indicators do not support < and > "
+    "comparison operations, unable to get the {op}",
     # ----------- Interpreter Common ------
     "2-3-1": "{comp_type} {comp_name} not found.",
     "2-3-2": "{op_type} cannot be used with {node_op} operators.",
@@ -14,7 +14,7 @@ from pandas._testing import assert_frame_equal
 import vtlengine.DataTypes as DataTypes
 from vtlengine.DataTypes import SCALAR_TYPES, ScalarType
 from vtlengine.DataTypes.TimeHandling import TimePeriodHandler
-from vtlengine.Exceptions import SemanticError
+from vtlengine.Exceptions import InputValidationException, SemanticError
 
 # from pyspark.pandas import DataFrame as SparkDataFrame, Series as SparkSeries
 
@@ -27,7 +27,28 @@ class Scalar:
 
     name: str
     data_type: Type[ScalarType]
-    value: Any
+    _value: Any
+
+    def __init__(self, name: str, data_type: Type[ScalarType], value: Any) -> None:
+        self.name = name
+        self.data_type = data_type
+        self.value = value
+
+    @property
+    def value(self) -> Any:
+        return self._value
+
+    @value.setter
+    def value(self, new_value: Any) -> None:
+        if self.data_type and not self.data_type.check(new_value):
+            raise InputValidationException(
+                code="0-1-2-7",
+                value=new_value,
+                type_=self.data_type.__name__,
+                op_type=self.__class__.__name__,
+                name=self.name,
+            )
+        self._value = new_value
 
     @classmethod
     def from_json(cls, json_str: str) -> "Scalar":
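
Because Scalar now routes every assignment through the value setter, an invalid literal is rejected as soon as the scalar is built (the __init__ above assigns self.value, which triggers the check) instead of surfacing later during execution. A standalone sketch of the same property pattern; the check callable and ValueError are illustrative stand-ins for data_type.check and InputValidationException:

from typing import Any, Callable

class DemoScalar:
    def __init__(self, name: str, check: Callable[[Any], bool], value: Any) -> None:
        self.name = name
        self.check = check
        self.value = value  # runs the setter below

    @property
    def value(self) -> Any:
        return self._value

    @value.setter
    def value(self, new_value: Any) -> None:
        # Same shape as the new Scalar.value setter: validate, then store.
        if not self.check(new_value):
            raise ValueError(f"Invalid value {new_value!r} for scalar {self.name}")
        self._value = new_value

DemoScalar("sc_1", str.isdigit, "123")       # accepted
try:
    DemoScalar("sc_2", str.isdigit, "12a")   # rejected inside __init__
except ValueError as err:
    print(err)
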
@@ -368,7 +389,30 @@ class ScalarSet:
     """
 
     data_type: Type[ScalarType]
-    values: List[Union[int, float, str, bool]]
+    _values: List[Union[int, float, str, bool]]
+
+    def __init__(
+        self, data_type: Type[ScalarType], values: List[Union[int, float, str, bool]]
+    ) -> None:
+        self.data_type = data_type
+        self.values = values
+
+    @property
+    def values(self) -> List[Union[int, float, str, bool]]:
+        return self._values
+
+    @values.setter
+    def values(self, new_values: List[Union[int, float, str, bool]]) -> None:
+        for value in new_values:
+            if self.data_type and not self.data_type.check(value):
+                raise InputValidationException(
+                    code="0-1-2-7",
+                    value=value,
+                    type_=self.data_type.__name__,
+                    op_type=self.__class__.__name__,
+                    name="",
+                )
+        self._values = new_values
 
     def __contains__(self, item: str) -> Optional[bool]:
         if isinstance(item, float) and item.is_integer():
@@ -72,7 +72,7 @@ class Aggregation(Operator.Unary):
                 new_value = ["9999-99-99"]
             else:
                 to_replace = ["9999-99-99"]
-            data[measure.name] = data[measure.name].replace(to_replace, new_value)
+            data[measure.name] = data[measure.name].replace(to_replace, new_value)  # type: ignore[arg-type, unused-ignore]
         elif measure.data_type == TimePeriod:
             if mode == "input":
                 data[measure.name] = (
@@ -80,6 +80,10 @@ class Aggregation(Operator.Unary):
                     .astype(object)
                     .map(lambda x: TimePeriodHandler(str(x)), na_action="ignore")
                 )
+                if cls.op in [MAX, MIN]:
+                    indicators = {v.period_indicator for v in data[measure.name].dropna()}
+                    if len(indicators) > 1:
+                        raise SemanticError("2-1-19-20", op=cls.op)
             else:
                 data[measure.name] = data[measure.name].map(
                     lambda x: str(x), na_action="ignore"
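
For max/min over a TimePeriod measure the column is scanned first: if it mixes period indicators (for example quarterly and monthly values), the aggregation is refused with error 2-1-19-20 rather than picking an arbitrary winner. A standalone sketch of that scan over a pandas column of period strings; the period_indicator_of helper is illustrative, vtlengine reads .period_indicator from TimePeriodHandler objects:

import pandas as pd

def period_indicator_of(period: str) -> str:
    # Illustrative parser: "2020-Q1" -> "Q", "2020-M01" -> "M", "2020" -> "A".
    return period.split("-")[1][0] if "-" in period else "A"

column = pd.Series(["2020-Q1", "2020-Q2", None, "2020-M01"])

indicators = {period_indicator_of(v) for v in column.dropna()}
if len(indicators) > 1:
    # vtlengine raises SemanticError("2-1-19-20", op=...) at this point.
    print(f"mixed period indicators {sorted(indicators)}: max/min refused")
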
@@ -99,7 +103,7 @@ class Aggregation(Operator.Unary):
                     lambda x: str(x), na_action="ignore"
                 )
         elif measure.data_type == String:
-            data[measure.name] = data[measure.name].replace(to_replace, new_value)
+            data[measure.name] = data[measure.name].replace(to_replace, new_value)  # type: ignore[arg-type, unused-ignore]
         elif measure.data_type == Duration:
             if mode == "input":
                 data[measure.name] = data[measure.name].map(
@@ -66,6 +66,7 @@ class If(Operator):
             false_data = pd.Series(false_branch.value, index=condition.data.index)
         else:
             false_data = false_branch.data.reindex(condition.data.index)
+        condition.data = condition.data.fillna(False)
         result = np.where(condition.data, true_data, false_data)
 
         return pd.Series(result, index=condition.data.index)  # type: ignore[union-attr]
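
Filling the condition with False before np.where matters because NaN is truthy: a row whose condition is missing would otherwise take the true branch, whereas after the fill it falls through to the else branch. A small before/after illustration:

import numpy as np
import pandas as pd

condition = pd.Series([True, np.nan, False])   # one missing condition
true_data = pd.Series(["then", "then", "then"])
false_data = pd.Series(["else", "else", "else"])

print(np.where(condition, true_data, false_data))                # ['then' 'then' 'else']
print(np.where(condition.fillna(False), true_data, false_data))  # ['then' 'else' 'else']
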
@@ -154,7 +154,7 @@ class Eval(Unary):
         for comp_name in component_names:
             if comp_name not in output.components:
                 raise SemanticError(
-                    "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=df.name
+                    "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=output.name
                 )
 
         for comp_name in output.components:
@@ -84,6 +84,8 @@ class Operator:
     def cast_time_types_scalar(cls, data_type: Any, value: str) -> Any:
         if cls.op not in BINARY_COMPARISON_OPERATORS:
             return value
+        if value is None:
+            return None
         if data_type.__name__ == "TimeInterval":
             return TimeIntervalHandler.from_iso_format(value)
         elif data_type.__name__ == "TimePeriod":
@@ -614,8 +616,10 @@ class Binary(Operator):
 
         for measure in dataset.get_measures():
             measure_data = cls.cast_time_types(measure.data_type, result_data[measure.name].copy())
-            if measure.data_type.__name__.__str__() == "Duration" and not isinstance(
-                scalar_value, int
+            if (
+                measure.data_type.__name__.__str__() == "Duration"
+                and not isinstance(scalar_value, int)
+                and scalar_value is not None
             ):
                 scalar_value = PERIOD_IND_MAPPING[scalar_value]
             result_dataset.data[measure.name] = cls.apply_operation_series_scalar(
@@ -654,8 +658,10 @@ class Binary(Operator):
             component.data.copy() if component.data is not None else pd.Series(),
         )
         scalar_value = cls.cast_time_types_scalar(scalar.data_type, scalar.value)
-        if component.data_type.__name__.__str__() == "Duration" and not isinstance(
-            scalar_value, int
+        if (
+            component.data_type.__name__.__str__() == "Duration"
+            and not isinstance(scalar_value, int)
+            and scalar_value is not None
         ):
             scalar_value = PERIOD_IND_MAPPING[scalar_value]
         result_component.data = cls.apply_operation_series_scalar(
vtlengine/__init__.py CHANGED
@@ -2,4 +2,4 @@ from vtlengine.API import generate_sdmx, prettify, run, run_sdmx, semantic_analy
 
 __all__ = ["semantic_analysis", "run", "generate_sdmx", "run_sdmx", "prettify"]
 
-__version__ = "1.2.2"
+__version__ = "1.3.0rc1"
@@ -17,14 +17,14 @@ from vtlengine.DataTypes import (
     TimeInterval,
     TimePeriod,
 )
-from vtlengine.DataTypes.TimeHandling import PERIOD_IND_MAPPING
-from vtlengine.Exceptions import InputValidationException, SemanticError
-from vtlengine.files.parser._rfc_dialect import register_rfc
-from vtlengine.files.parser._time_checking import (
+from vtlengine.DataTypes._time_checking import (
     check_date,
     check_time,
     check_time_period,
 )
+from vtlengine.DataTypes.TimeHandling import PERIOD_IND_MAPPING
+from vtlengine.Exceptions import InputValidationException, SemanticError
+from vtlengine.files.parser._rfc_dialect import register_rfc
 from vtlengine.Model import Component, Dataset, Role
 
 TIME_CHECKS_MAPPING: Dict[Type[ScalarType], Any] = {
@@ -108,7 +108,7 @@ def _pandas_load_csv(components: Dict[str, Component], csv_path: Union[str, Path
     obj_dtypes = {comp_name: object for comp_name, comp in components.items()}
 
     data = pd.read_csv(
-        csv_path,
+        csv_path,  # type: ignore[call-overload, unused-ignore]
         dtype=obj_dtypes,
        engine="c",
         keep_default_na=False,
@@ -148,7 +148,7 @@ def _validate_pandas(
     if len(id_names) == 0 and len(data) > 1:
         raise SemanticError("0-1-1-5", name=dataset_name)
 
-    data = data.fillna(np.nan).replace([np.nan], [None])
+    data = data.fillna(np.nan).replace([np.nan], None)
     # Checking data types on all data types
     comp_name = ""
     comp = None
@@ -182,7 +182,7 @@ def _validate_pandas(
         values_correct = (
             data[comp_name]
            .map(
-                lambda x: x.replace(" ", "") in PERIOD_IND_MAPPING,  # type: ignore[union-attr]
+                lambda x: x.replace(" ", "") in PERIOD_IND_MAPPING,  # type: ignore[union-attr, unused-ignore]
                 na_action="ignore",
             )
            .all()
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: vtlengine
-Version: 1.2.2
+Version: 1.3.0rc1
 Summary: Run and Validate VTL Scripts
 License-Expression: AGPL-3.0
 License-File: LICENSE.md
@@ -17,8 +17,8 @@ Classifier: Intended Audience :: Science/Research
 Classifier: Typing :: Typed
 Provides-Extra: all
 Provides-Extra: s3
-Requires-Dist: antlr4-python3-runtime (>=4.13.2,<4.14)
-Requires-Dist: duckdb (>=1.1,<1.2)
+Requires-Dist: antlr4-python3-runtime (>=4.9,<4.10)
+Requires-Dist: duckdb (>=1.4,<1.5)
 Requires-Dist: fsspec (>=2022.11.0,<2023.0) ; extra == "all"
 Requires-Dist: fsspec (>=2022.11.0,<2023.0) ; extra == "s3"
 Requires-Dist: jsonschema (>=3.2.0,<5.0)
@@ -1,6 +1,8 @@
-vtlengine/API/_InternalApi.py,sha256=ptmL3F07ThTN2G2yLAo7p6Az_njScJYfBbasYUaLEF0,24167
-vtlengine/API/__init__.py,sha256=XyL_7ZNaEfL5Xbler7iHI7MtsbHsQRvopSa25h14R3A,18598
+vtlengine/API/_InternalApi.py,sha256=6nZPc8jARsa7rVw09YOhvgPHkVOJtOta5sPyEc9U2T0,25165
+vtlengine/API/__init__.py,sha256=-dy9QfT79knEjcAPZ55ck0U-d1XxTu6htYtfUDmy1Es,21034
+vtlengine/API/data/schema/external_routines_schema.json,sha256=--egdFkK1-vB-WkJ10bNko2qZ9ZP24bgpZv_D_qquQI,696
 vtlengine/API/data/schema/json_schema_2.1.json,sha256=v3-C0Xnq8qScJSPAtLgb3rjKMrd3nz-bIxgZdTSEUiU,4336
+vtlengine/API/data/schema/value_domain_schema.json,sha256=Qr7AOnPsfLAWv1XZk-pm8d0vVU79Qw3P1tBs7g4lthg,2229
 vtlengine/AST/ASTComment.py,sha256=bAJW7aaqBXU2LqMtRvL_XOttdl1AFZufa15vmQdvNlY,1667
 vtlengine/AST/ASTConstructor.py,sha256=X55I98BKG1ItyGIDObF9ALVfCcWnU-0wwCWJsiPILkg,21488
 vtlengine/AST/ASTConstructorModules/Expr.py,sha256=PdI66D3dwA4ymxgqqcChkctsWMRgBSfuyUtgH-KOkss,70207
@@ -17,26 +19,27 @@ vtlengine/AST/DAG/_words.py,sha256=LyRL9j-vZUNHdLDJZJrq2nKUmVlpbxdzd9ovW6CnNoU,2
 vtlengine/AST/Grammar/Vtl.g4,sha256=g4a76A04qH-SaR9a9LfrG4rt3GPZ7UpqZLISkY1BkmI,26323
 vtlengine/AST/Grammar/VtlTokens.g4,sha256=SwDR_59U25APqslczFcvTUiPoH7bC6kGaH2GkJ3kYzA,9972
 vtlengine/AST/Grammar/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vtlengine/AST/Grammar/lexer.py,sha256=66cH0cJi83Sxd8XPrPRYkBwdz4NGhPaadUnq5p0GYUI,256579
-vtlengine/AST/Grammar/parser.py,sha256=fWJaGcXvUCwN2pvjJBU4l5apoQnkhQbUv_mSlKKiDXc,712465
+vtlengine/AST/Grammar/lexer.py,sha256=zg3HdNvuoKNkijt-jWhavmWfTPmU4Dx6Jb0sf_1xvEg,105722
+vtlengine/AST/Grammar/parser.py,sha256=NIcPZPjwke4Ik5ksm96BHL0WaVEs_FXWu61waehVHVk,638343
 vtlengine/AST/Grammar/tokens.py,sha256=YF7tO0nF2zYC-VaBAJLyc6VitM72CvYfFQpoPDGCMzo,3139
 vtlengine/AST/VtlVisitor.py,sha256=NJfXJVP6wNmasJmPLlojFqm9R5VSamOAKg_w7BMrhac,35332
 vtlengine/AST/__init__.py,sha256=zTrSDHd3AFaHvvqdPYT8ZthqN2anHfI9Ul1QomA4rNo,11708
-vtlengine/DataTypes/TimeHandling.py,sha256=K8769_b3Q4kPXGC2XVQ4oN7VsGnhxmJtgdhC_ryXjho,20250
-vtlengine/DataTypes/__init__.py,sha256=LYXrde68bYm7MLeMLmr4haeOTSE4Fnpq9G2Ewy7DiaU,23084
+vtlengine/DataTypes/TimeHandling.py,sha256=6kP5CWrrmP1DdRq_mViG5G2-48J7eNW3_FCqHAhoXNM,20497
+vtlengine/DataTypes/__init__.py,sha256=60iB8m6izBDrY1FkI5woIo2oEJZ2dqCTGiH-3UmFXQs,25479
+vtlengine/DataTypes/_time_checking.py,sha256=YAbjO8IEc8fJz3mSstCV3Qma2S5Sk5PRZzQjI48RElY,4822
 vtlengine/Exceptions/__init__.py,sha256=rSSskV_qCBFzg_W67Q1QBAL7Lnq88D7yi2BDYo1hytw,4727
-vtlengine/Exceptions/messages.py,sha256=V68Al8_TGvb_kY9PHEtSRp26rF_PjhjHdDMRtB-IuTI,20113
+vtlengine/Exceptions/messages.py,sha256=DMeQt9cNE1hutDo0BAq0KoVLhnPWJS9suDud51C68QU,20544
 vtlengine/Interpreter/__init__.py,sha256=6Ffl5bJRL1KSF335xSxfA8a5y_pV8ZNQUYM9BmYN6hg,87256
-vtlengine/Model/__init__.py,sha256=xWrwhdUOj8Y-5x38zP5XnmFPw8IkBVBBG2bPsUBGLA8,15869
-vtlengine/Operators/Aggregation.py,sha256=K9u0PE77ZAqEwKTCRyRx9G9uvpyVVZQBB_E4648B4V8,12158
+vtlengine/Model/__init__.py,sha256=9FCxBuGotA1_L6x0l8uk9emVguE4PFIaA3cPAjN_D3E,17340
+vtlengine/Operators/Aggregation.py,sha256=bweme6bum3QA5LFR6uvNm7JQkFUpNAkyD2kIqAGeQiQ,12500
 vtlengine/Operators/Analytic.py,sha256=adm8y4mTeen4iVMsQvcvxM9U5f6Xj9UNjdCQI2OBINE,12934
 vtlengine/Operators/Assignment.py,sha256=xyJgGPoFYbq6mzX06gz7Q7L8jXJxpUkgzdY3Lrne2hw,793
 vtlengine/Operators/Boolean.py,sha256=3U5lHkxW5d7QQdGDNxXeXqejlPfFrXKG8_TqknrC8Ls,2856
 vtlengine/Operators/CastOperator.py,sha256=pXTSs0UYBeR5hS3J2HWUyaHmoZoifl2EFch6ol_Taok,17115
 vtlengine/Operators/Clause.py,sha256=Lu6zjcUBkShN6kQmjEZu_7ytaFGwfH-yB4ROoCSkLGI,15505
 vtlengine/Operators/Comparison.py,sha256=CRMvs9qXVXUW32pxAnCua8b7ZHpJy0-Egvs691ekOCk,17403
-vtlengine/Operators/Conditional.py,sha256=Py4QwCgCi4t4-FG75-RMe7M5sDRgUZlRGyuFt4RwwJ4,21182
-vtlengine/Operators/General.py,sha256=ltRK8Sw686sb4rC5ji2OX-GYVxaK_PpL0Lev8P5OFHI,6828
+vtlengine/Operators/Conditional.py,sha256=wtpfhMw8Pnw61SGb1CX2iOu1ELPqObJXcmodYXVE9ZY,21240
+vtlengine/Operators/General.py,sha256=uu-n9FvXzEvgqDTUMy5LhqdsOgqDd3rGaYRXwXl77OQ,6832
 vtlengine/Operators/HROperators.py,sha256=YybwD70906AA00c0k4IP6sjeta0pg7hqb2EUVsFqdmA,8979
 vtlengine/Operators/Join.py,sha256=lYmC_jGlJ4RRmn2vplB13Ysrxgv6O8sNFEHQYZzun5s,18393
 vtlengine/Operators/Numeric.py,sha256=icYTWzEsw6VQFLYc5Wucgr8961d8ZwTFx_wfZ8Wp9Co,12083
@@ -45,19 +48,18 @@ vtlengine/Operators/Set.py,sha256=f1uLeY4XZF0cWEwpXRB_CczgbXr6s33DYPuFt39HlEg,70
 vtlengine/Operators/String.py,sha256=ghWtYl6oUEAAzynY1a9Hg4yqRA9Sa7uk2B6iF9uuSqQ,20230
 vtlengine/Operators/Time.py,sha256=ESn6ldPg73bdZxOXZYJuIwCLDQnXDGTqR1y7ckQmV1M,43025
 vtlengine/Operators/Validation.py,sha256=ot-og75Ce690DddQM-xILrY3PoRf8Z0M3aIovFK-wMY,11245
-vtlengine/Operators/__init__.py,sha256=N1zi9RFC_l0qggRm5IPLOkPFtFS4CGAg-r1taHOrbTI,37667
+vtlengine/Operators/__init__.py,sha256=iwYUO00ce_J6jRHmFScIJZB3WTLuhS_bNOQkN5BfeCw,37835
 vtlengine/Utils/__Virtual_Assets.py,sha256=0jPXysZrBr0hYVzqFoyg9La8ZcZoZ01Ql245X5vrth4,862
 vtlengine/Utils/__init__.py,sha256=zhGPJA8MjHmtEEwMS4CxEFYL0tk2L5F0YPn7bitdRzM,8954
 vtlengine/__extras_check.py,sha256=Wr-lxGZhXJZEacVV5cUkvKt7XM-mry0kYAe3VxNrVcY,614
-vtlengine/__init__.py,sha256=2ylv7tLHXE_OKN9-zlvcdqenn_WPaMklmkjAt-EJ16A,188
+vtlengine/__init__.py,sha256=zTOiIdhnN9M97pRWFDTsvCYQ4n2wrpF7QdXjD5FKkRQ,191
 vtlengine/files/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vtlengine/files/output/__init__.py,sha256=4tmf-p1Y1u5Ohrwt3clQA-FMGaijKI3HC_iwn3H9J8c,1250
 vtlengine/files/output/_time_period_representation.py,sha256=D5XCSXyEuX_aBzTvBV3sZxACcgwXz2Uu_YH3loMP8q0,1610
-vtlengine/files/parser/__init__.py,sha256=WdvToMTIeWgkkuUWSDvtACQlguEZN3plz5LPvyCvdt0,9030
+vtlengine/files/parser/__init__.py,sha256=RWtQSKFk64Ag4gzIuRw1nHzWvO91_zvAATEbMPR2Urc,9086
 vtlengine/files/parser/_rfc_dialect.py,sha256=Y8kAYBxH_t9AieN_tYg7QRh5A4DgvabKarx9Ko3QeCQ,462
-vtlengine/files/parser/_time_checking.py,sha256=-MsfAJdIHpVwPulaiklxvyPfWAVocwvf43WSqgusryc,4849
 vtlengine/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vtlengine-1.2.2.dist-info/METADATA,sha256=BzikHNHbu6YDewYFLYeYD5tAkwoEg2sfI2ySJBBcnR8,4165
-vtlengine-1.2.2.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-vtlengine-1.2.2.dist-info/licenses/LICENSE.md,sha256=2xqHuoHohba7gpcZZKtOICRjzeKsQANXG8WoV9V35KM,33893
-vtlengine-1.2.2.dist-info/RECORD,,
+vtlengine-1.3.0rc1.dist-info/METADATA,sha256=YWAp6Hql-chdGgKnlIKELuaL3ZaR0C4amn_sEpcZe-s,4165
+vtlengine-1.3.0rc1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+vtlengine-1.3.0rc1.dist-info/licenses/LICENSE.md,sha256=2xqHuoHohba7gpcZZKtOICRjzeKsQANXG8WoV9V35KM,33893
+vtlengine-1.3.0rc1.dist-info/RECORD,,