vtlengine 1.2.1rc1__py3-none-any.whl → 1.3.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vtlengine has been flagged as potentially problematic; consult the package registry's advisory page for details.

@@ -66,6 +66,7 @@ class If(Operator):
66
66
  false_data = pd.Series(false_branch.value, index=condition.data.index)
67
67
  else:
68
68
  false_data = false_branch.data.reindex(condition.data.index)
69
+ condition.data = condition.data.fillna(False)
69
70
  result = np.where(condition.data, true_data, false_data)
70
71
 
71
72
  return pd.Series(result, index=condition.data.index) # type: ignore[union-attr]
@@ -316,6 +317,7 @@ class Case(Operator):
316
317
  cls, conditions: List[Any], thenOps: List[Any], elseOp: Any
317
318
  ) -> Union[Scalar, DataComponent, Dataset]:
318
319
  result = cls.validate(conditions, thenOps, elseOp)
320
+
319
321
  for condition in conditions:
320
322
  if isinstance(condition, Dataset) and condition.data is not None:
321
323
  condition.data.fillna(False, inplace=True)
@@ -344,57 +346,73 @@ class Case(Operator):
344
346
  result.value = thenOps[i].value
345
347
 
346
348
  if isinstance(result, DataComponent):
347
- result.data = pd.Series(None, index=conditions[0].data.index)
349
+ full_index = conditions[0].data.index
350
+ result.data = pd.Series(None, index=full_index)
348
351
 
349
352
  for i, condition in enumerate(conditions):
350
- value = thenOps[i].value if isinstance(thenOps[i], Scalar) else thenOps[i].data
351
- result.data = np.where(
352
- condition.data.notna(),
353
- np.where(condition.data, value, result.data),
354
- result.data,
355
- )
356
-
357
- condition_mask_else = ~np.any([condition.data for condition in conditions], axis=0)
358
- else_value = elseOp.value if isinstance(elseOp, Scalar) else elseOp.data
359
- result.data = pd.Series(
360
- np.where(condition_mask_else, else_value, result.data),
361
- index=conditions[0].data.index,
353
+ if isinstance(thenOps[i], Scalar):
354
+ value_series = pd.Series(thenOps[i].value, index=full_index)
355
+ else:
356
+ value_series = thenOps[i].data.reindex(full_index)
357
+ cond_series = condition.data.reindex(full_index)
358
+ cond_mask = cond_series.notna() & cond_series == True
359
+ result_data = result.data.copy()
360
+ result_data[cond_mask] = value_series[cond_mask]
361
+ result.data = result_data
362
+
363
+ conditions_stack = [c.data.reindex(full_index).fillna(False) for c in conditions]
364
+ else_cond_mask = (
365
+ ~np.logical_or.reduce(conditions_stack)
366
+ if conditions_stack
367
+ else pd.Series(True, index=full_index)
362
368
  )
369
+ if isinstance(elseOp, Scalar):
370
+ else_series = pd.Series(elseOp.value, index=full_index)
371
+ else:
372
+ else_series = elseOp.data.reindex(full_index)
373
+ result.data[else_cond_mask] = else_series[else_cond_mask]
363
374
 
364
- if isinstance(result, Dataset):
375
+ elif isinstance(result, Dataset):
365
376
  identifiers = result.get_identifiers_names()
366
377
  columns = [col for col in result.get_components_names() if col not in identifiers]
367
378
  result.data = (
368
379
  conditions[0].data[identifiers]
369
380
  if conditions[0].data is not None
370
381
  else pd.DataFrame(columns=identifiers)
371
- )
382
+ ).copy()
372
383
 
384
+ full_index = result.data.index
373
385
  for i in range(len(conditions)):
374
386
  condition = conditions[i]
375
387
  bool_col = next(x.name for x in condition.get_measures() if x.data_type == Boolean)
376
- condition_mask = condition.data[bool_col]
388
+ cond_mask = condition.data[bool_col].reindex(full_index).astype(bool)
377
389
 
378
- result.data.loc[condition_mask, columns] = (
379
- thenOps[i].value
380
- if isinstance(thenOps[i], Scalar)
381
- else thenOps[i].data.loc[condition_mask, columns]
382
- )
383
-
384
- condition_mask_else = ~np.logical_or.reduce(
385
- [
386
- condition.data[
387
- next(x.name for x in condition.get_measures() if x.data_type == Boolean)
388
- ].astype(bool)
389
- for condition in conditions
390
- ]
390
+ if isinstance(thenOps[i], Scalar):
391
+ for col in columns:
392
+ result.data.loc[cond_mask, col] = thenOps[i].value
393
+ else:
394
+ cond_df = thenOps[i].data.reindex(full_index)
395
+ result.data.loc[cond_mask, columns] = cond_df.loc[cond_mask, columns]
396
+
397
+ then_cond_masks = [
398
+ c.data[next(x.name for x in c.get_measures() if x.data_type == Boolean)]
399
+ .reindex(full_index)
400
+ .fillna(False)
401
+ .astype(bool)
402
+ for c in conditions
403
+ ]
404
+ else_cond_mask = (
405
+ ~np.logical_or.reduce(then_cond_masks)
406
+ if then_cond_masks
407
+ else pd.Series(True, index=full_index)
391
408
  )
392
409
 
393
- result.data.loc[condition_mask_else, columns] = ( # type: ignore[index, unused-ignore]
394
- elseOp.value
395
- if isinstance(elseOp, Scalar)
396
- else elseOp.data.loc[condition_mask_else, columns]
397
- )
410
+ if isinstance(elseOp, Scalar):
411
+ for col in columns:
412
+ result.data.loc[else_cond_mask, col] = elseOp.value
413
+ else:
414
+ else_df = elseOp.data.reindex(full_index)
415
+ result.data.loc[else_cond_mask, columns] = else_df.loc[else_cond_mask, columns]
398
416
 
399
417
  return result
400
418
 
@@ -154,7 +154,7 @@ class Eval(Unary):
154
154
  for comp_name in component_names:
155
155
  if comp_name not in output.components:
156
156
  raise SemanticError(
157
- "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=df.name
157
+ "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=output.name
158
158
  )
159
159
 
160
160
  for comp_name in output.components:
@@ -1,5 +1,5 @@
1
1
  from copy import copy
2
- from typing import Any, Dict, Optional
2
+ from typing import Any, Dict, Optional, Union
3
3
 
4
4
  import pandas as pd
5
5
 
@@ -27,7 +27,7 @@ class Check(Operator):
27
27
  validation_element: Dataset,
28
28
  imbalance_element: Optional[Dataset],
29
29
  error_code: Optional[str],
30
- error_level: Optional[int],
30
+ error_level: Optional[Union[int, str]],
31
31
  invalid: bool,
32
32
  ) -> Dataset:
33
33
  dataset_name = VirtualCounter._new_ds_name()
@@ -36,6 +36,13 @@ class Check(Operator):
36
36
  measure = validation_element.get_measures()[0]
37
37
  if measure.data_type != Boolean:
38
38
  raise SemanticError("1-1-10-1", op=cls.op, op_type="validation", me_type="Boolean")
39
+ error_level_type = None
40
+ if error_level is None or isinstance(error_level, int):
41
+ error_level_type = Integer
42
+ elif isinstance(error_level, str):
43
+ error_level_type = String # type: ignore[assignment]
44
+ else:
45
+ error_level_type = String
39
46
 
40
47
  imbalance_measure = None
41
48
  if imbalance_element is not None:
@@ -69,8 +76,12 @@ class Check(Operator):
69
76
  result_components["errorcode"] = Component(
70
77
  name="errorcode", data_type=String, role=Role.MEASURE, nullable=True
71
78
  )
79
+
72
80
  result_components["errorlevel"] = Component(
73
- name="errorlevel", data_type=Integer, role=Role.MEASURE, nullable=True
81
+ name="errorlevel",
82
+ data_type=error_level_type, # type: ignore[arg-type]
83
+ role=Role.MEASURE,
84
+ nullable=True,
74
85
  )
75
86
 
76
87
  return Dataset(name=dataset_name, components=result_components, data=None)
@@ -81,7 +92,7 @@ class Check(Operator):
81
92
  validation_element: Dataset,
82
93
  imbalance_element: Optional[Dataset],
83
94
  error_code: Optional[str],
84
- error_level: Optional[int],
95
+ error_level: Optional[Union[int, str]],
85
96
  invalid: bool,
86
97
  ) -> Dataset:
87
98
  result = cls.validate(
@@ -128,6 +139,20 @@ class Validation(Operator):
128
139
 
129
140
  @classmethod
130
141
  def validate(cls, dataset_element: Dataset, rule_info: Dict[str, Any], output: str) -> Dataset:
142
+ error_level_type = None
143
+ error_levels = [
144
+ rule_data.get("errorlevel")
145
+ for rule_data in rule_info.values()
146
+ if "errorlevel" in rule_data
147
+ ]
148
+ non_null_levels = [el for el in error_levels if el is not None]
149
+
150
+ if len(non_null_levels) == 0 or all(isinstance(el, int) for el in non_null_levels):
151
+ error_level_type = Number
152
+ elif all(isinstance(el, str) for el in non_null_levels):
153
+ error_level_type = String # type: ignore[assignment]
154
+ else:
155
+ error_level_type = String # type: ignore[assignment]
131
156
  dataset_name = VirtualCounter._new_ds_name()
132
157
  result_components = {comp.name: comp for comp in dataset_element.get_identifiers()}
133
158
  result_components["ruleid"] = Component(
@@ -154,7 +179,10 @@ class Validation(Operator):
154
179
  name="errorcode", data_type=String, role=Role.MEASURE, nullable=True
155
180
  )
156
181
  result_components["errorlevel"] = Component(
157
- name="errorlevel", data_type=Number, role=Role.MEASURE, nullable=True
182
+ name="errorlevel",
183
+ data_type=error_level_type, # type: ignore[arg-type]
184
+ role=Role.MEASURE,
185
+ nullable=True,
158
186
  )
159
187
 
160
188
  return Dataset(name=dataset_name, components=result_components, data=None)
@@ -84,6 +84,8 @@ class Operator:
84
84
  def cast_time_types_scalar(cls, data_type: Any, value: str) -> Any:
85
85
  if cls.op not in BINARY_COMPARISON_OPERATORS:
86
86
  return value
87
+ if value is None:
88
+ return None
87
89
  if data_type.__name__ == "TimeInterval":
88
90
  return TimeIntervalHandler.from_iso_format(value)
89
91
  elif data_type.__name__ == "TimePeriod":
@@ -614,8 +616,10 @@ class Binary(Operator):
614
616
 
615
617
  for measure in dataset.get_measures():
616
618
  measure_data = cls.cast_time_types(measure.data_type, result_data[measure.name].copy())
617
- if measure.data_type.__name__.__str__() == "Duration" and not isinstance(
618
- scalar_value, int
619
+ if (
620
+ measure.data_type.__name__.__str__() == "Duration"
621
+ and not isinstance(scalar_value, int)
622
+ and scalar_value is not None
619
623
  ):
620
624
  scalar_value = PERIOD_IND_MAPPING[scalar_value]
621
625
  result_dataset.data[measure.name] = cls.apply_operation_series_scalar(
@@ -654,8 +658,10 @@ class Binary(Operator):
654
658
  component.data.copy() if component.data is not None else pd.Series(),
655
659
  )
656
660
  scalar_value = cls.cast_time_types_scalar(scalar.data_type, scalar.value)
657
- if component.data_type.__name__.__str__() == "Duration" and not isinstance(
658
- scalar_value, int
661
+ if (
662
+ component.data_type.__name__.__str__() == "Duration"
663
+ and not isinstance(scalar_value, int)
664
+ and scalar_value is not None
659
665
  ):
660
666
  scalar_value = PERIOD_IND_MAPPING[scalar_value]
661
667
  result_component.data = cls.apply_operation_series_scalar(
vtlengine/__init__.py CHANGED
@@ -2,4 +2,4 @@ from vtlengine.API import generate_sdmx, prettify, run, run_sdmx, semantic_analy
2
2
 
3
3
  __all__ = ["semantic_analysis", "run", "generate_sdmx", "run_sdmx", "prettify"]
4
4
 
5
- __version__ = "1.2.1rc1"
5
+ __version__ = "1.3.0rc1"
@@ -17,14 +17,14 @@ from vtlengine.DataTypes import (
17
17
  TimeInterval,
18
18
  TimePeriod,
19
19
  )
20
- from vtlengine.DataTypes.TimeHandling import PERIOD_IND_MAPPING
21
- from vtlengine.Exceptions import InputValidationException, SemanticError
22
- from vtlengine.files.parser._rfc_dialect import register_rfc
23
- from vtlengine.files.parser._time_checking import (
20
+ from vtlengine.DataTypes._time_checking import (
24
21
  check_date,
25
22
  check_time,
26
23
  check_time_period,
27
24
  )
25
+ from vtlengine.DataTypes.TimeHandling import PERIOD_IND_MAPPING
26
+ from vtlengine.Exceptions import InputValidationException, SemanticError
27
+ from vtlengine.files.parser._rfc_dialect import register_rfc
28
28
  from vtlengine.Model import Component, Dataset, Role
29
29
 
30
30
  TIME_CHECKS_MAPPING: Dict[Type[ScalarType], Any] = {
@@ -108,7 +108,7 @@ def _pandas_load_csv(components: Dict[str, Component], csv_path: Union[str, Path
108
108
  obj_dtypes = {comp_name: object for comp_name, comp in components.items()}
109
109
 
110
110
  data = pd.read_csv(
111
- csv_path,
111
+ csv_path, # type: ignore[call-overload, unused-ignore]
112
112
  dtype=obj_dtypes,
113
113
  engine="c",
114
114
  keep_default_na=False,
@@ -148,7 +148,7 @@ def _validate_pandas(
148
148
  if len(id_names) == 0 and len(data) > 1:
149
149
  raise SemanticError("0-1-1-5", name=dataset_name)
150
150
 
151
- data = data.fillna(np.nan).replace([np.nan], [None])
151
+ data = data.fillna(np.nan).replace([np.nan], None)
152
152
  # Checking data types on all data types
153
153
  comp_name = ""
154
154
  comp = None
@@ -182,7 +182,7 @@ def _validate_pandas(
182
182
  values_correct = (
183
183
  data[comp_name]
184
184
  .map(
185
- lambda x: x.replace(" ", "") in PERIOD_IND_MAPPING, # type: ignore[union-attr]
185
+ lambda x: x.replace(" ", "") in PERIOD_IND_MAPPING, # type: ignore[union-attr, unused-ignore]
186
186
  na_action="ignore",
187
187
  )
188
188
  .all()
@@ -203,9 +203,19 @@ def _validate_pandas(
203
203
  str_comp = SCALAR_TYPES_CLASS_REVERSE[comp.data_type] if comp else "Null"
204
204
  raise SemanticError("0-1-1-12", name=dataset_name, column=comp_name, type=str_comp)
205
205
 
206
+ if id_names:
207
+ check_identifiers_duplicity(data, id_names, dataset_name)
208
+
206
209
  return data
207
210
 
208
211
 
212
+ def check_identifiers_duplicity(data: pd.DataFrame, identifiers: List[str], name: str) -> None:
213
+ dup_id_row = data.duplicated(subset=identifiers, keep=False)
214
+ if dup_id_row.any():
215
+ row_index = int(dup_id_row.idxmax()) + 1
216
+ raise SemanticError("0-1-1-15", name=name, row_index=row_index)
217
+
218
+
209
219
  def load_datapoints(
210
220
  components: Dict[str, Component],
211
221
  dataset_name: str,
@@ -1,8 +1,9 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: vtlengine
3
- Version: 1.2.1rc1
3
+ Version: 1.3.0rc1
4
4
  Summary: Run and Validate VTL Scripts
5
- License: AGPL-3.0
5
+ License-Expression: AGPL-3.0
6
+ License-File: LICENSE.md
6
7
  Keywords: vtl,sdmx,vtlengine,Validation and Transformation Language
7
8
  Author: MeaningfulData
8
9
  Author-email: info@meaningfuldata.eu
@@ -16,8 +17,8 @@ Classifier: Intended Audience :: Science/Research
16
17
  Classifier: Typing :: Typed
17
18
  Provides-Extra: all
18
19
  Provides-Extra: s3
19
- Requires-Dist: antlr4-python3-runtime (>=4.13.2,<4.14)
20
- Requires-Dist: duckdb (>=1.1,<1.2)
20
+ Requires-Dist: antlr4-python3-runtime (>=4.9,<4.10)
21
+ Requires-Dist: duckdb (>=1.4,<1.5)
21
22
  Requires-Dist: fsspec (>=2022.11.0,<2023.0) ; extra == "all"
22
23
  Requires-Dist: fsspec (>=2022.11.0,<2023.0) ; extra == "s3"
23
24
  Requires-Dist: jsonschema (>=3.2.0,<5.0)
@@ -25,7 +26,7 @@ Requires-Dist: networkx (>=2.8,<3.0)
25
26
  Requires-Dist: numpy (>=1.23.2,<2) ; python_version < "3.13"
26
27
  Requires-Dist: numpy (>=2.1.0) ; python_version >= "3.13"
27
28
  Requires-Dist: pandas (>=2.1.4,<3.0)
28
- Requires-Dist: pysdmx[xml] (>=1.4.0rc1,<2.0)
29
+ Requires-Dist: pysdmx[xml] (>=1.5.2,<2.0)
29
30
  Requires-Dist: s3fs (>=2022.11.0,<2023.0) ; extra == "all"
30
31
  Requires-Dist: s3fs (>=2022.11.0,<2023.0) ; extra == "s3"
31
32
  Requires-Dist: sqlglot (>=22.2.0,<23.0)
@@ -1,11 +1,13 @@
1
- vtlengine/API/_InternalApi.py,sha256=ptmL3F07ThTN2G2yLAo7p6Az_njScJYfBbasYUaLEF0,24167
2
- vtlengine/API/__init__.py,sha256=XyL_7ZNaEfL5Xbler7iHI7MtsbHsQRvopSa25h14R3A,18598
1
+ vtlengine/API/_InternalApi.py,sha256=6nZPc8jARsa7rVw09YOhvgPHkVOJtOta5sPyEc9U2T0,25165
2
+ vtlengine/API/__init__.py,sha256=-dy9QfT79knEjcAPZ55ck0U-d1XxTu6htYtfUDmy1Es,21034
3
+ vtlengine/API/data/schema/external_routines_schema.json,sha256=--egdFkK1-vB-WkJ10bNko2qZ9ZP24bgpZv_D_qquQI,696
3
4
  vtlengine/API/data/schema/json_schema_2.1.json,sha256=v3-C0Xnq8qScJSPAtLgb3rjKMrd3nz-bIxgZdTSEUiU,4336
5
+ vtlengine/API/data/schema/value_domain_schema.json,sha256=Qr7AOnPsfLAWv1XZk-pm8d0vVU79Qw3P1tBs7g4lthg,2229
4
6
  vtlengine/AST/ASTComment.py,sha256=bAJW7aaqBXU2LqMtRvL_XOttdl1AFZufa15vmQdvNlY,1667
5
7
  vtlengine/AST/ASTConstructor.py,sha256=X55I98BKG1ItyGIDObF9ALVfCcWnU-0wwCWJsiPILkg,21488
6
8
  vtlengine/AST/ASTConstructorModules/Expr.py,sha256=PdI66D3dwA4ymxgqqcChkctsWMRgBSfuyUtgH-KOkss,70207
7
9
  vtlengine/AST/ASTConstructorModules/ExprComponents.py,sha256=2Ft4e5w2NtbfaqSNW8I9qSpG9iUaPIfdug7yYWo2gqE,38553
8
- vtlengine/AST/ASTConstructorModules/Terminals.py,sha256=7zWDx_SFcbnL35G7Y0qZwl-lLEsfqReyzBX0UxwTCOk,27054
10
+ vtlengine/AST/ASTConstructorModules/Terminals.py,sha256=0-5XZbGTXSQxAAWeF-xBBszyaGEb02lD1Ar82ljxk28,26911
9
11
  vtlengine/AST/ASTConstructorModules/__init__.py,sha256=J6g6NhJD8j0Ek1YmpethxRiFdjhLxUTM0mc3NHRFLlM,1879
10
12
  vtlengine/AST/ASTDataExchange.py,sha256=kPSz21DGbEv-2bZowObseqf2d2_iQj1VnrqWuD9ZwtA,140
11
13
  vtlengine/AST/ASTEncoders.py,sha256=-Ar6a0GqMdJZK4CtZ1pUpIeGv57oSdN5qy3-aF0Zt9c,948
@@ -17,26 +19,27 @@ vtlengine/AST/DAG/_words.py,sha256=LyRL9j-vZUNHdLDJZJrq2nKUmVlpbxdzd9ovW6CnNoU,2
17
19
  vtlengine/AST/Grammar/Vtl.g4,sha256=g4a76A04qH-SaR9a9LfrG4rt3GPZ7UpqZLISkY1BkmI,26323
18
20
  vtlengine/AST/Grammar/VtlTokens.g4,sha256=SwDR_59U25APqslczFcvTUiPoH7bC6kGaH2GkJ3kYzA,9972
19
21
  vtlengine/AST/Grammar/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
- vtlengine/AST/Grammar/lexer.py,sha256=66cH0cJi83Sxd8XPrPRYkBwdz4NGhPaadUnq5p0GYUI,256579
21
- vtlengine/AST/Grammar/parser.py,sha256=fWJaGcXvUCwN2pvjJBU4l5apoQnkhQbUv_mSlKKiDXc,712465
22
+ vtlengine/AST/Grammar/lexer.py,sha256=zg3HdNvuoKNkijt-jWhavmWfTPmU4Dx6Jb0sf_1xvEg,105722
23
+ vtlengine/AST/Grammar/parser.py,sha256=NIcPZPjwke4Ik5ksm96BHL0WaVEs_FXWu61waehVHVk,638343
22
24
  vtlengine/AST/Grammar/tokens.py,sha256=YF7tO0nF2zYC-VaBAJLyc6VitM72CvYfFQpoPDGCMzo,3139
23
25
  vtlengine/AST/VtlVisitor.py,sha256=NJfXJVP6wNmasJmPLlojFqm9R5VSamOAKg_w7BMrhac,35332
24
- vtlengine/AST/__init__.py,sha256=JnPilognG2rT2gtpjD4OwKFX0O3ZqvV-ic8gJxRu7Xo,11672
25
- vtlengine/DataTypes/TimeHandling.py,sha256=CYnC0sb1qbRjTnCSsA3wgez7QftOzrXHxbuZXlY3O3Q,20151
26
- vtlengine/DataTypes/__init__.py,sha256=LYXrde68bYm7MLeMLmr4haeOTSE4Fnpq9G2Ewy7DiaU,23084
26
+ vtlengine/AST/__init__.py,sha256=zTrSDHd3AFaHvvqdPYT8ZthqN2anHfI9Ul1QomA4rNo,11708
27
+ vtlengine/DataTypes/TimeHandling.py,sha256=6kP5CWrrmP1DdRq_mViG5G2-48J7eNW3_FCqHAhoXNM,20497
28
+ vtlengine/DataTypes/__init__.py,sha256=60iB8m6izBDrY1FkI5woIo2oEJZ2dqCTGiH-3UmFXQs,25479
29
+ vtlengine/DataTypes/_time_checking.py,sha256=YAbjO8IEc8fJz3mSstCV3Qma2S5Sk5PRZzQjI48RElY,4822
27
30
  vtlengine/Exceptions/__init__.py,sha256=rSSskV_qCBFzg_W67Q1QBAL7Lnq88D7yi2BDYo1hytw,4727
28
- vtlengine/Exceptions/messages.py,sha256=h2RHfgolbNsYXO39FXT3NTe2RwG-1AK5NL9k6utPtCA,19658
29
- vtlengine/Interpreter/__init__.py,sha256=aO7CGEzFgg0W6kkXFYHzogVxsiVI0l9pJ3wH8m1WuPE,85502
30
- vtlengine/Model/__init__.py,sha256=xWrwhdUOj8Y-5x38zP5XnmFPw8IkBVBBG2bPsUBGLA8,15869
31
- vtlengine/Operators/Aggregation.py,sha256=BI4cHzdWWtxEHisg1cr87twg8gvc1MHfR05JsiXpo6M,11956
31
+ vtlengine/Exceptions/messages.py,sha256=DMeQt9cNE1hutDo0BAq0KoVLhnPWJS9suDud51C68QU,20544
32
+ vtlengine/Interpreter/__init__.py,sha256=6Ffl5bJRL1KSF335xSxfA8a5y_pV8ZNQUYM9BmYN6hg,87256
33
+ vtlengine/Model/__init__.py,sha256=9FCxBuGotA1_L6x0l8uk9emVguE4PFIaA3cPAjN_D3E,17340
34
+ vtlengine/Operators/Aggregation.py,sha256=bweme6bum3QA5LFR6uvNm7JQkFUpNAkyD2kIqAGeQiQ,12500
32
35
  vtlengine/Operators/Analytic.py,sha256=adm8y4mTeen4iVMsQvcvxM9U5f6Xj9UNjdCQI2OBINE,12934
33
36
  vtlengine/Operators/Assignment.py,sha256=xyJgGPoFYbq6mzX06gz7Q7L8jXJxpUkgzdY3Lrne2hw,793
34
37
  vtlengine/Operators/Boolean.py,sha256=3U5lHkxW5d7QQdGDNxXeXqejlPfFrXKG8_TqknrC8Ls,2856
35
38
  vtlengine/Operators/CastOperator.py,sha256=pXTSs0UYBeR5hS3J2HWUyaHmoZoifl2EFch6ol_Taok,17115
36
39
  vtlengine/Operators/Clause.py,sha256=Lu6zjcUBkShN6kQmjEZu_7ytaFGwfH-yB4ROoCSkLGI,15505
37
40
  vtlengine/Operators/Comparison.py,sha256=CRMvs9qXVXUW32pxAnCua8b7ZHpJy0-Egvs691ekOCk,17403
38
- vtlengine/Operators/Conditional.py,sha256=lu0S06GtBzVR_pbq9_QyAGeUC0Xcawag-3wBh5W93Xc,20340
39
- vtlengine/Operators/General.py,sha256=ltRK8Sw686sb4rC5ji2OX-GYVxaK_PpL0Lev8P5OFHI,6828
41
+ vtlengine/Operators/Conditional.py,sha256=wtpfhMw8Pnw61SGb1CX2iOu1ELPqObJXcmodYXVE9ZY,21240
42
+ vtlengine/Operators/General.py,sha256=uu-n9FvXzEvgqDTUMy5LhqdsOgqDd3rGaYRXwXl77OQ,6832
40
43
  vtlengine/Operators/HROperators.py,sha256=YybwD70906AA00c0k4IP6sjeta0pg7hqb2EUVsFqdmA,8979
41
44
  vtlengine/Operators/Join.py,sha256=lYmC_jGlJ4RRmn2vplB13Ysrxgv6O8sNFEHQYZzun5s,18393
42
45
  vtlengine/Operators/Numeric.py,sha256=icYTWzEsw6VQFLYc5Wucgr8961d8ZwTFx_wfZ8Wp9Co,12083
@@ -44,20 +47,19 @@ vtlengine/Operators/RoleSetter.py,sha256=mHZIdcHC3wflj81ekLbioDG1f8yHZXYDQFymV-K
44
47
  vtlengine/Operators/Set.py,sha256=f1uLeY4XZF0cWEwpXRB_CczgbXr6s33DYPuFt39HlEg,7084
45
48
  vtlengine/Operators/String.py,sha256=ghWtYl6oUEAAzynY1a9Hg4yqRA9Sa7uk2B6iF9uuSqQ,20230
46
49
  vtlengine/Operators/Time.py,sha256=ESn6ldPg73bdZxOXZYJuIwCLDQnXDGTqR1y7ckQmV1M,43025
47
- vtlengine/Operators/Validation.py,sha256=tnHRZ7o0Z_AE1Bb2DtRVP6pGGUtSs5KVwNSEJxzzGnk,10162
48
- vtlengine/Operators/__init__.py,sha256=N1zi9RFC_l0qggRm5IPLOkPFtFS4CGAg-r1taHOrbTI,37667
50
+ vtlengine/Operators/Validation.py,sha256=ot-og75Ce690DddQM-xILrY3PoRf8Z0M3aIovFK-wMY,11245
51
+ vtlengine/Operators/__init__.py,sha256=iwYUO00ce_J6jRHmFScIJZB3WTLuhS_bNOQkN5BfeCw,37835
49
52
  vtlengine/Utils/__Virtual_Assets.py,sha256=0jPXysZrBr0hYVzqFoyg9La8ZcZoZ01Ql245X5vrth4,862
50
53
  vtlengine/Utils/__init__.py,sha256=zhGPJA8MjHmtEEwMS4CxEFYL0tk2L5F0YPn7bitdRzM,8954
51
54
  vtlengine/__extras_check.py,sha256=Wr-lxGZhXJZEacVV5cUkvKt7XM-mry0kYAe3VxNrVcY,614
52
- vtlengine/__init__.py,sha256=l8c8MEFiqOOnoRwhFkch3oZAxLsyNg2vIj0c96LYZmU,191
55
+ vtlengine/__init__.py,sha256=zTOiIdhnN9M97pRWFDTsvCYQ4n2wrpF7QdXjD5FKkRQ,191
53
56
  vtlengine/files/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
57
  vtlengine/files/output/__init__.py,sha256=4tmf-p1Y1u5Ohrwt3clQA-FMGaijKI3HC_iwn3H9J8c,1250
55
58
  vtlengine/files/output/_time_period_representation.py,sha256=D5XCSXyEuX_aBzTvBV3sZxACcgwXz2Uu_YH3loMP8q0,1610
56
- vtlengine/files/parser/__init__.py,sha256=JamEIWI0pFZxT0sKYE6Fii8H2JQcsFn4Nf3T0OLSm9g,8637
59
+ vtlengine/files/parser/__init__.py,sha256=RWtQSKFk64Ag4gzIuRw1nHzWvO91_zvAATEbMPR2Urc,9086
57
60
  vtlengine/files/parser/_rfc_dialect.py,sha256=Y8kAYBxH_t9AieN_tYg7QRh5A4DgvabKarx9Ko3QeCQ,462
58
- vtlengine/files/parser/_time_checking.py,sha256=UAC_Pv-eQJKrhgTguWb--xfqMMs6quyMeiAkGBt_vgI,4725
59
61
  vtlengine/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
60
- vtlengine-1.2.1rc1.dist-info/LICENSE.md,sha256=2xqHuoHohba7gpcZZKtOICRjzeKsQANXG8WoV9V35KM,33893
61
- vtlengine-1.2.1rc1.dist-info/METADATA,sha256=OgvVzj7n35H2Z9qPn3vzXcUXAPiFcYUzzACX7PA053A,4135
62
- vtlengine-1.2.1rc1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
63
- vtlengine-1.2.1rc1.dist-info/RECORD,,
62
+ vtlengine-1.3.0rc1.dist-info/METADATA,sha256=YWAp6Hql-chdGgKnlIKELuaL3ZaR0C4amn_sEpcZe-s,4165
63
+ vtlengine-1.3.0rc1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
64
+ vtlengine-1.3.0rc1.dist-info/licenses/LICENSE.md,sha256=2xqHuoHohba7gpcZZKtOICRjzeKsQANXG8WoV9V35KM,33893
65
+ vtlengine-1.3.0rc1.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 2.1.3
2
+ Generator: poetry-core 2.2.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any