vtlengine-1.4.0rc2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. vtlengine/API/_InternalApi.py +791 -0
  2. vtlengine/API/__init__.py +612 -0
  3. vtlengine/API/data/schema/external_routines_schema.json +34 -0
  4. vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  5. vtlengine/API/data/schema/value_domain_schema.json +97 -0
  6. vtlengine/AST/ASTComment.py +57 -0
  7. vtlengine/AST/ASTConstructor.py +598 -0
  8. vtlengine/AST/ASTConstructorModules/Expr.py +1928 -0
  9. vtlengine/AST/ASTConstructorModules/ExprComponents.py +995 -0
  10. vtlengine/AST/ASTConstructorModules/Terminals.py +790 -0
  11. vtlengine/AST/ASTConstructorModules/__init__.py +50 -0
  12. vtlengine/AST/ASTDataExchange.py +10 -0
  13. vtlengine/AST/ASTEncoders.py +32 -0
  14. vtlengine/AST/ASTString.py +675 -0
  15. vtlengine/AST/ASTTemplate.py +558 -0
  16. vtlengine/AST/ASTVisitor.py +25 -0
  17. vtlengine/AST/DAG/__init__.py +479 -0
  18. vtlengine/AST/DAG/_words.py +10 -0
  19. vtlengine/AST/Grammar/Vtl.g4 +705 -0
  20. vtlengine/AST/Grammar/VtlTokens.g4 +409 -0
  21. vtlengine/AST/Grammar/__init__.py +0 -0
  22. vtlengine/AST/Grammar/lexer.py +2139 -0
  23. vtlengine/AST/Grammar/parser.py +16597 -0
  24. vtlengine/AST/Grammar/tokens.py +169 -0
  25. vtlengine/AST/VtlVisitor.py +824 -0
  26. vtlengine/AST/__init__.py +674 -0
  27. vtlengine/DataTypes/TimeHandling.py +562 -0
  28. vtlengine/DataTypes/__init__.py +863 -0
  29. vtlengine/DataTypes/_time_checking.py +135 -0
  30. vtlengine/Exceptions/__exception_file_generator.py +96 -0
  31. vtlengine/Exceptions/__init__.py +159 -0
  32. vtlengine/Exceptions/messages.py +1004 -0
  33. vtlengine/Interpreter/__init__.py +2048 -0
  34. vtlengine/Model/__init__.py +501 -0
  35. vtlengine/Operators/Aggregation.py +357 -0
  36. vtlengine/Operators/Analytic.py +455 -0
  37. vtlengine/Operators/Assignment.py +23 -0
  38. vtlengine/Operators/Boolean.py +106 -0
  39. vtlengine/Operators/CastOperator.py +451 -0
  40. vtlengine/Operators/Clause.py +366 -0
  41. vtlengine/Operators/Comparison.py +488 -0
  42. vtlengine/Operators/Conditional.py +495 -0
  43. vtlengine/Operators/General.py +191 -0
  44. vtlengine/Operators/HROperators.py +254 -0
  45. vtlengine/Operators/Join.py +447 -0
  46. vtlengine/Operators/Numeric.py +422 -0
  47. vtlengine/Operators/RoleSetter.py +77 -0
  48. vtlengine/Operators/Set.py +176 -0
  49. vtlengine/Operators/String.py +578 -0
  50. vtlengine/Operators/Time.py +1144 -0
  51. vtlengine/Operators/Validation.py +275 -0
  52. vtlengine/Operators/__init__.py +900 -0
  53. vtlengine/Utils/__Virtual_Assets.py +34 -0
  54. vtlengine/Utils/__init__.py +479 -0
  55. vtlengine/__extras_check.py +17 -0
  56. vtlengine/__init__.py +27 -0
  57. vtlengine/files/__init__.py +0 -0
  58. vtlengine/files/output/__init__.py +35 -0
  59. vtlengine/files/output/_time_period_representation.py +55 -0
  60. vtlengine/files/parser/__init__.py +240 -0
  61. vtlengine/files/parser/_rfc_dialect.py +22 -0
  62. vtlengine/py.typed +0 -0
  63. vtlengine-1.4.0rc2.dist-info/METADATA +89 -0
  64. vtlengine-1.4.0rc2.dist-info/RECORD +66 -0
  65. vtlengine-1.4.0rc2.dist-info/WHEEL +4 -0
  66. vtlengine-1.4.0rc2.dist-info/licenses/LICENSE.md +661 -0
vtlengine/files/parser/__init__.py ADDED
@@ -0,0 +1,240 @@
+ import warnings
+ from csv import DictReader
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Type, Union
+
+ import numpy as np
+ import pandas as pd
+
+ from vtlengine.DataTypes import (
+     SCALAR_TYPES_CLASS_REVERSE,
+     Boolean,
+     Date,
+     Duration,
+     Integer,
+     Number,
+     ScalarType,
+     TimeInterval,
+     TimePeriod,
+ )
+ from vtlengine.DataTypes._time_checking import (
+     check_date,
+     check_time,
+     check_time_period,
+ )
+ from vtlengine.DataTypes.TimeHandling import PERIOD_IND_MAPPING
+ from vtlengine.Exceptions import DataLoadError, InputValidationException
+ from vtlengine.files.parser._rfc_dialect import register_rfc
+ from vtlengine.Model import Component, Dataset, Role
+
+ TIME_CHECKS_MAPPING: Dict[Type[ScalarType], Any] = {
+     Date: check_date,
+     TimePeriod: check_time_period,
+     TimeInterval: check_time,
+ }
+
+
+ def _validate_csv_path(components: Dict[str, Component], csv_path: Path) -> None:
+     # GE1 check if the file is empty
+     if not csv_path.exists():
+         raise DataLoadError(code="0-3-1-1", file=csv_path)
+     if not csv_path.is_file():
+         raise DataLoadError(code="0-3-1-1", file=csv_path)
+     register_rfc()
+     try:
+         with open(csv_path, "r", errors="replace", encoding="utf-8") as f:
+             reader = DictReader(f, dialect="rfc")
+             csv_columns = reader.fieldnames
+     except InputValidationException as ie:
+         raise InputValidationException("{}".format(str(ie))) from None
+     except Exception as e:
+         raise InputValidationException(
+             f"ERROR: {str(e)}, review file {str(csv_path.as_posix())}"
+         ) from None
+
+     if not csv_columns:
+         raise InputValidationException(code="0-1-1-6", file=csv_path)
+
+     if len(list(set(csv_columns))) != len(csv_columns):
+         duplicates = list(set([item for item in csv_columns if csv_columns.count(item) > 1]))
+         raise InputValidationException(
+             code="0-1-2-3", element_type="Columns", element=f"{', '.join(duplicates)}"
+         )
+
+     comp_names = set([c.name for c in components.values() if c.role == Role.IDENTIFIER])
+     comps_missing: Union[str, List[str]] = (
+         [id_m for id_m in comp_names if id_m not in reader.fieldnames] if reader.fieldnames else []
+     )
+     if comps_missing:
+         comps_missing = ", ".join(comps_missing)
+         raise InputValidationException(code="0-1-1-8", ids=comps_missing, file=str(csv_path.name))
+
+
+ def _sanitize_pandas_columns(
+     components: Dict[str, Component], csv_path: Union[str, Path], data: pd.DataFrame
+ ) -> pd.DataFrame:
+     # Fast loading from SDMX-CSV
+     if (
+         "DATAFLOW" in data.columns
+         and data.columns[0] == "DATAFLOW"
+         and "DATAFLOW" not in components
+     ):
+         data.drop(columns=["DATAFLOW"], inplace=True)
+     if "STRUCTURE" in data.columns and data.columns[0] == "STRUCTURE":
+         if "STRUCTURE" not in components:
+             data.drop(columns=["STRUCTURE"], inplace=True)
+         if "STRUCTURE_ID" in data.columns:
+             data.drop(columns=["STRUCTURE_ID"], inplace=True)
+         if "ACTION" in data.columns:
+             data = data[data["ACTION"] != "D"]
+             data.drop(columns=["ACTION"], inplace=True)
+
+     # Validate identifiers
+     comp_names = set([c.name for c in components.values() if c.role == Role.IDENTIFIER])
+     comps_missing: Union[str, List[str]] = [id_m for id_m in comp_names if id_m not in data.columns]
+     if comps_missing:
+         comps_missing = ", ".join(comps_missing)
+         file = csv_path if isinstance(csv_path, str) else csv_path.name
+         raise InputValidationException(code="0-1-1-7", ids=comps_missing, file=file)
+
+     # Fill rest of components with null values
+     for comp_name, comp in components.items():
+         if comp_name not in data:
+             if not comp.nullable:
+                 raise InputValidationException(f"Component {comp_name} is missing in the file.")
+             data[comp_name] = None
+     return data
+
+
+ def _pandas_load_csv(components: Dict[str, Component], csv_path: Union[str, Path]) -> pd.DataFrame:
+     obj_dtypes = {comp_name: object for comp_name, comp in components.items()}
+
+     data = pd.read_csv(
+         csv_path,  # type: ignore[call-overload, unused-ignore]
+         dtype=obj_dtypes,
+         engine="c",
+         keep_default_na=False,
+         na_values=[""],
+         encoding_errors="replace",
+     )
+
+     return _sanitize_pandas_columns(components, csv_path, data)
+
+
+ def _parse_boolean(value: str) -> bool:
+     if isinstance(value, bool):
+         return value
+     result = value.lower() == "true" or value == "1"
+     return result
+
+
+ def _validate_pandas(
+     components: Dict[str, Component], data: pd.DataFrame, dataset_name: str
+ ) -> pd.DataFrame:
+     warnings.filterwarnings("ignore", category=FutureWarning)
+     # Identifier checking
+
+     id_names = [comp_name for comp_name, comp in components.items() if comp.role == Role.IDENTIFIER]
+
+     missing_columns = [name for name in components if name not in data.columns.tolist()]
+     if missing_columns:
+         for name in missing_columns:
+             if components[name].nullable is False:
+                 raise DataLoadError("0-3-1-5", name=dataset_name, comp_name=name)
+             data[name] = None
+
+     for id_name in id_names:
+         if data[id_name].isnull().any():
+             raise DataLoadError("0-3-1-3", null_identifier=id_name, name=dataset_name)
+
+     if len(id_names) == 0 and len(data) > 1:
+         raise DataLoadError("0-3-1-4", name=dataset_name)
+
+     data = data.fillna(np.nan).replace([np.nan], None)
+     # Checking data types on all data types
+     comp_name = ""
+     comp = None
+     try:
+         for comp_name, comp in components.items():
+             if comp.data_type in (Date, TimePeriod, TimeInterval):
+                 data[comp_name] = data[comp_name].map(
+                     TIME_CHECKS_MAPPING[comp.data_type], na_action="ignore"
+                 )
+             elif comp.data_type == Integer:
+                 data[comp_name] = data[comp_name].map(
+                     lambda x: Integer.cast(float(str(x))), na_action="ignore"
+                 )
+             elif comp.data_type == Number:
+                 data[comp_name] = data[comp_name].map(lambda x: float((str(x))), na_action="ignore")
+             elif comp.data_type == Boolean:
+                 data[comp_name] = data[comp_name].map(
+                     lambda x: _parse_boolean(str(x)), na_action="ignore"
+                 )
+             elif comp.data_type == Duration:
+                 values_correct = (
+                     data[comp_name]
+                     .map(
+                         lambda x: Duration.validate_duration(x),
+                         na_action="ignore",
+                     )
+                     .all()
+                 )
+                 if not values_correct:
+                     try:
+                         values_correct = (
+                             data[comp_name]
+                             .map(
+                                 lambda x: x.replace(" ", "") in PERIOD_IND_MAPPING,  # type: ignore[union-attr, unused-ignore]
+                                 na_action="ignore",
+                             )
+                             .all()
+                         )
+                         if not values_correct:
+                             raise ValueError(
+                                 f"Duration values are not correct in column {comp_name}"
+                             )
+                     except ValueError:
+                         raise ValueError(f"Duration values are not correct in column {comp_name}")
+             else:
+                 data[comp_name] = data[comp_name].map(
+                     lambda x: str(x).replace('"', ""), na_action="ignore"
+                 )
+             data[comp_name] = data[comp_name].astype(object, errors="raise")
+
+     except ValueError:
+         str_comp = SCALAR_TYPES_CLASS_REVERSE[comp.data_type] if comp else "Null"
+         raise DataLoadError("0-3-1-6", name=dataset_name, column=comp_name, type=str_comp)
+
+     if id_names:
+         check_identifiers_duplicity(data, id_names, dataset_name)
+
+     return data
+
+
+ def check_identifiers_duplicity(data: pd.DataFrame, identifiers: List[str], name: str) -> None:
+     dup_id_row = data.duplicated(subset=identifiers, keep=False)
+     if dup_id_row.any():
+         row_index = int(dup_id_row.idxmax()) + 1
+         raise DataLoadError("0-3-1-7", name=name, row_index=row_index)
+
+
+ def load_datapoints(
+     components: Dict[str, Component],
+     dataset_name: str,
+     csv_path: Optional[Union[Path, str]] = None,
+ ) -> pd.DataFrame:
+     if csv_path is None or (isinstance(csv_path, Path) and not csv_path.exists()):
+         return pd.DataFrame(columns=list(components.keys()))
+     elif isinstance(csv_path, (str, Path)):
+         if isinstance(csv_path, Path):
+             _validate_csv_path(components, csv_path)
+         data = _pandas_load_csv(components, csv_path)
+     else:
+         raise InputValidationException(code="0-1-1-2", input=csv_path)
+     data = _validate_pandas(components, data, dataset_name)
+
+     return data
+
+
+ def _fill_dataset_empty_data(dataset: Dataset) -> None:
+     dataset.data = pd.DataFrame(columns=list(dataset.components.keys()))
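For orientation, the sketch below drives `load_datapoints` directly. It is illustrative only: the `Component` constructor and `Role.MEASURE` live in `vtlengine/Model/__init__.py`, which this diff does not reproduce, so the keyword arguments used here (`name`, `data_type`, `role`, `nullable`) are assumptions inferred from the attributes the parser reads (`c.name`, `c.role`, `comp.data_type`, `comp.nullable`).

```python
# Illustrative sketch, not part of the wheel. The Component(...) keyword arguments and
# Role.MEASURE are assumptions; only the attributes accessed by the parser above
# (name, role, data_type, nullable) are confirmed by this diff.
from pathlib import Path

from vtlengine.DataTypes import Integer, Number
from vtlengine.files.parser import load_datapoints
from vtlengine.Model import Component, Role

components = {
    "Id_1": Component(name="Id_1", data_type=Integer, role=Role.IDENTIFIER, nullable=False),
    "Me_1": Component(name="Me_1", data_type=Number, role=Role.MEASURE, nullable=True),
}

# Without a csv_path, load_datapoints returns an empty DataFrame with one column per component.
empty = load_datapoints(components, dataset_name="DS_1")
print(list(empty.columns))  # ['Id_1', 'Me_1']

# With a Path, the file is validated (RFC 4180 dialect, duplicate/missing identifier checks)
# and each column is cast according to its component's data type. "DS_1.csv" is a placeholder path.
data = load_datapoints(components, dataset_name="DS_1", csv_path=Path("DS_1.csv"))
```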
vtlengine/files/parser/_rfc_dialect.py ADDED
@@ -0,0 +1,22 @@
+ import csv
+
+
+ class RFCDialect(csv.Dialect):
+     """
+     https://docs.python.org/3/library/csv.html#csv.Dialect
+     https://tools.ietf.org/html/rfc4180
+     """
+
+     delimiter = ","
+     doublequote = True
+     lineterminator = "\r\n"
+     quotechar = '"'
+     quoting = csv.QUOTE_MINIMAL
+     strict = True
+     escapechar = None
+     skipinitialspace = False
+
+
+ def register_rfc() -> None:
+     """Register the RFC dialect."""
+     csv.register_dialect("rfc", RFCDialect)
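The dialect above pins CSV parsing to RFC 4180 conventions (comma delimiter, CRLF line terminator, minimal double-quoting, strict mode). A minimal sketch of how it is consumed, mirroring the `DictReader(f, dialect="rfc")` call in `_validate_csv_path` above; the in-memory sample is illustrative:

```python
import csv
import io

from vtlengine.files.parser._rfc_dialect import register_rfc

# Registering the dialect makes it addressable by name through the stdlib csv module.
register_rfc()

# Illustrative in-memory sample; real input comes from the csv_path handled above.
sample = io.StringIO('Id_1,Me_1\r\n1,"10.5"\r\n2,20\r\n')
reader = csv.DictReader(sample, dialect="rfc")
print(reader.fieldnames)        # ['Id_1', 'Me_1']
print([row for row in reader])  # quoted and unquoted fields are both accepted
```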
vtlengine/py.typed ADDED
File without changes
vtlengine-1.4.0rc2.dist-info/METADATA ADDED
@@ -0,0 +1,89 @@
+ Metadata-Version: 2.4
+ Name: vtlengine
+ Version: 1.4.0rc2
+ Summary: Run and Validate VTL Scripts
+ License-Expression: AGPL-3.0
+ License-File: LICENSE.md
+ Keywords: vtl,sdmx,vtlengine,Validation and Transformation Language
+ Author: MeaningfulData
+ Author-email: info@meaningfuldata.eu
+ Maintainer: Francisco Javier Hernandez del Caño
+ Maintainer-email: javier.hernandez@meaningfuldata.eu
+ Requires-Python: >=3.9
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Information Technology
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Typing :: Typed
+ Provides-Extra: all
+ Provides-Extra: s3
+ Requires-Dist: antlr4-python3-runtime (>=4.9,<4.10)
+ Requires-Dist: duckdb (>=1.4,<1.5)
+ Requires-Dist: jsonschema (>=3.2.0,<5.0)
+ Requires-Dist: networkx (>=2.8,<3.0)
+ Requires-Dist: pandas (>=2.1.4,<3.0)
+ Requires-Dist: pysdmx[xml] (>=1.5.2,<2.0)
+ Requires-Dist: s3fs (>=2022.11.0) ; extra == "all"
+ Requires-Dist: s3fs (>=2022.11.0) ; extra == "s3"
+ Requires-Dist: sqlglot (>=22.2.0,<23.0)
+ Project-URL: Authors, https://github.com/Meaningful-Data/vtlengine/graphs/contributors
+ Project-URL: Documentation, https://docs.vtlengine.meaningfuldata.eu
+ Project-URL: IssueTracker, https://github.com/Meaningful-Data/vtlengine/issues
+ Project-URL: MeaningfulData, https://www.meaningfuldata.eu/
+ Project-URL: Repository, https://github.com/Meaningful-Data/vtlengine
+ Description-Content-Type: text/markdown
+
+ # VTL Engine
+
+ | | |
+ |--------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+ | Testing | [![Testing](https://github.com/Meaningful-Data/vtlengine/actions/workflows/testing.yml/badge.svg)](https://github.com/Meaningful-Data/vtlengine/actions/workflows/testing.yml) |
+ | Package | [![PyPI Latest Release](https://img.shields.io/pypi/v/vtlengine.svg)](https://pypi.org/project/vtlengine/) |
+ | License | [![License - AGPL 3.0](https://img.shields.io/pypi/l/vtlengine.svg)](https://github.com/Meaningful-Data/vtlengine/blob/main/LICENSE.md) |
+ | Mentioned in | [![Mentioned in Awesome Official Statistics ](https://awesome.re/mentioned-badge.svg)](https://github.com/SNStatComp/awesome-official-statistics-software) |
+
+ ## Introduction
+
+ The VTL Engine is a Python library that allows you to validate, format and execute VTL scripts.
+
+ It is a Python-based library around
+ the [VTL Language 2.1](https://sdmx-twg.github.io/vtl/2.1/html/index.html).
+
+ ## Useful Links
+
+ - [MeaningfulData: who we are](https://www.meaningfuldata.eu)
+ - [Documentation](https://docs.vtlengine.meaningfuldata.eu)
+ - [Source Code](https://github.com/Meaningful-Data/vtlengine)
+ - [Bug Tracker](https://github.com/Meaningful-Data/vtlengine/issues?q=is%3Aopen+is%3Aissue+label%3Abug)
+ - [New features Tracker](https://github.com/Meaningful-Data/vtlengine/issues?q=is%3Aopen+is%3Aissue+label%3Aenhancement)
+
+ ## Installation
+
+ ### Requirements
+
+ The VTL Engine requires Python 3.9 or higher.
+
+ ### Install with pip
+
+ To install the VTL Engine on any Operating System, you can use pip:
+
+ ```bash
+
+ pip install vtlengine
+
+ ```
+
+ *Note: it is recommended to install the VTL Engine in a virtual environment.*
+
+ ### S3 extra
+
+ If you want to use the S3 functionality, you can install the VTL Engine with the `s3` extra:
+
+ ```bash
+ pip install vtlengine[s3]
+ ```
+
+ ## Documentation
+
+ The documentation for the VTL Engine is available
+ at [docs.vtlengine.meaningfuldata.eu](https://docs.vtlengine.meaningfuldata.eu).
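The README above stops at installation; as a quick orientation, the sketch below runs a one-line VTL script. The public entry points sit in `vtlengine/API/__init__.py`, whose body is not reproduced in this diff, so the `run` call and the data-structure dictionary layout shown here are assumptions drawn from the project documentation rather than from this wheel's contents.

```python
# Sketch only: `run`, its parameters, its return value, and the data-structure layout
# are assumed from the project documentation; vtlengine/API/__init__.py is not shown here.
import pandas as pd

from vtlengine import run  # assumed public export

script = "DS_r := DS_1 * 10;"

# Assumed layout, following the VTL 2.1 JSON data-structure schema bundled in the wheel
# (vtlengine/API/data/schema/json_schema_2.1.json).
data_structures = {
    "datasets": [
        {
            "name": "DS_1",
            "DataStructure": [
                {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False},
                {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True},
            ],
        }
    ]
}

datapoints = {"DS_1": pd.DataFrame({"Id_1": [1, 2], "Me_1": [10.0, 20.0]})}

results = run(script=script, data_structures=data_structures, datapoints=datapoints)
print(results["DS_r"].data)  # assumed: run returns a dict of Dataset objects keyed by name
```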
vtlengine-1.4.0rc2.dist-info/RECORD ADDED
@@ -0,0 +1,66 @@
+ vtlengine/API/_InternalApi.py,sha256=qkrEPwrOCWDeL4TeAw0IKNeLb30UfsTgqRRQnUDLhKY,29571
+ vtlengine/API/__init__.py,sha256=JZ_SoLp34xC9rglN8CCgExo9GTSg_bJVU_yUu3vhJJs,22992
+ vtlengine/API/data/schema/external_routines_schema.json,sha256=--egdFkK1-vB-WkJ10bNko2qZ9ZP24bgpZv_D_qquQI,696
+ vtlengine/API/data/schema/json_schema_2.1.json,sha256=v3-C0Xnq8qScJSPAtLgb3rjKMrd3nz-bIxgZdTSEUiU,4336
+ vtlengine/API/data/schema/value_domain_schema.json,sha256=Qr7AOnPsfLAWv1XZk-pm8d0vVU79Qw3P1tBs7g4lthg,2229
+ vtlengine/AST/ASTComment.py,sha256=rn9lVIafpjoBc4fjCs0o3ytglf8URg2whsUzLhCE19w,1723
+ vtlengine/AST/ASTConstructor.py,sha256=Ypi4iLW-2LXbtd4jsEjvICpll79cBIuSD0REYjfDJAE,21488
+ vtlengine/AST/ASTConstructorModules/Expr.py,sha256=Wjl6Eb_tF-ncF3Frgp2UAZXBUSwxR6ZuegWUR9Y3MyI,70556
+ vtlengine/AST/ASTConstructorModules/ExprComponents.py,sha256=_HdsvJ2JiDWI19EwgjDkLtVf9YkSaFnZJLf1R5KndoA,38553
+ vtlengine/AST/ASTConstructorModules/Terminals.py,sha256=mWvjEoeF2hA1GVzm5pD33al3oNDzXjNydGq_4wdMdfo,26891
+ vtlengine/AST/ASTConstructorModules/__init__.py,sha256=J6g6NhJD8j0Ek1YmpethxRiFdjhLxUTM0mc3NHRFLlM,1879
+ vtlengine/AST/ASTDataExchange.py,sha256=kPSz21DGbEv-2bZowObseqf2d2_iQj1VnrqWuD9ZwtA,140
+ vtlengine/AST/ASTEncoders.py,sha256=-Ar6a0GqMdJZK4CtZ1pUpIeGv57oSdN5qy3-aF0Zt9c,948
+ vtlengine/AST/ASTString.py,sha256=HBvDW0te1ZNIkPgmqW4D4D20Wca1JWdr475u64tqWSg,27901
+ vtlengine/AST/ASTTemplate.py,sha256=UN9T6BBgd964dF4I9DpdnvU0n0qV1R5RKGdr_Jtm5pw,13099
+ vtlengine/AST/ASTVisitor.py,sha256=3QQTudBpbR4pPQdH7y07EgwuzhoGzNQ59qox8R-E3fM,500
+ vtlengine/AST/DAG/__init__.py,sha256=2xAQpKJrH01jlKXgKHuBeB7eDXVO5CSUC4OWpmuXqc4,17120
+ vtlengine/AST/DAG/_words.py,sha256=LyRL9j-vZUNHdLDJZJrq2nKUmVlpbxdzd9ovW6CnNoU,200
+ vtlengine/AST/Grammar/Vtl.g4,sha256=g4a76A04qH-SaR9a9LfrG4rt3GPZ7UpqZLISkY1BkmI,26323
+ vtlengine/AST/Grammar/VtlTokens.g4,sha256=SwDR_59U25APqslczFcvTUiPoH7bC6kGaH2GkJ3kYzA,9972
+ vtlengine/AST/Grammar/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vtlengine/AST/Grammar/lexer.py,sha256=zg3HdNvuoKNkijt-jWhavmWfTPmU4Dx6Jb0sf_1xvEg,105722
+ vtlengine/AST/Grammar/parser.py,sha256=NIcPZPjwke4Ik5ksm96BHL0WaVEs_FXWu61waehVHVk,638343
+ vtlengine/AST/Grammar/tokens.py,sha256=YF7tO0nF2zYC-VaBAJLyc6VitM72CvYfFQpoPDGCMzo,3139
+ vtlengine/AST/VtlVisitor.py,sha256=NJfXJVP6wNmasJmPLlojFqm9R5VSamOAKg_w7BMrhac,35332
+ vtlengine/AST/__init__.py,sha256=ypsyviCe1AxpJmJkI2z9MbB0WtJk0IEK4rXhLhu6z1w,11736
+ vtlengine/DataTypes/TimeHandling.py,sha256=6kP5CWrrmP1DdRq_mViG5G2-48J7eNW3_FCqHAhoXNM,20497
+ vtlengine/DataTypes/__init__.py,sha256=j1ijUT7OiK1WltI9CRL0dc868Emcj1Y-WhiCGT51bfQ,25486
+ vtlengine/DataTypes/_time_checking.py,sha256=YAbjO8IEc8fJz3mSstCV3Qma2S5Sk5PRZzQjI48RElY,4822
+ vtlengine/Exceptions/__exception_file_generator.py,sha256=E-MEXfdL2eivdoUXDQZ3Tq8XT77ZH3e8WkWr600xLS0,3782
+ vtlengine/Exceptions/__init__.py,sha256=5eVyH9gNJpE9VfUh2Te9ZXOiLX0gJtKqqIZ0n4rOnPg,4445
+ vtlengine/Exceptions/messages.py,sha256=PXsHe3xG5oXeisZUq6sAO03FeDZvoYntMO5CkRDjRxw,46407
+ vtlengine/Interpreter/__init__.py,sha256=5zL2oXjNGCAVj3yB64VTt0hkQR6q0B0m_Jd9rDrbtew,89128
+ vtlengine/Model/__init__.py,sha256=rcgrYbQmP8bOvnUO6tLcg6kMZp4I9EveqyQJ4KZAeqY,17576
+ vtlengine/Operators/Aggregation.py,sha256=rD9LYZHVbfrZ7DWc1m8vb4BcoP_nBF7UAnp_Qnlz5cE,12510
+ vtlengine/Operators/Analytic.py,sha256=adm8y4mTeen4iVMsQvcvxM9U5f6Xj9UNjdCQI2OBINE,12934
+ vtlengine/Operators/Assignment.py,sha256=xyJgGPoFYbq6mzX06gz7Q7L8jXJxpUkgzdY3Lrne2hw,793
+ vtlengine/Operators/Boolean.py,sha256=3U5lHkxW5d7QQdGDNxXeXqejlPfFrXKG8_TqknrC8Ls,2856
+ vtlengine/Operators/CastOperator.py,sha256=IzyCehwPp5lG6Txd3mH352hEV9f5fQ6wRTfYikljHlE,17213
+ vtlengine/Operators/Clause.py,sha256=ctD0pQX3qrxcQGoBztbMQKhhBkXOZ3w6ensWCuAN7OY,15505
+ vtlengine/Operators/Comparison.py,sha256=paSX24S0iW-KYi_11cjIkRGwxToCfeUsbCXzLqvxMHw,17457
+ vtlengine/Operators/Conditional.py,sha256=Pu42sRwIqhYOCFO40aShVw9YYWgm6PUoUU6ialI8yRs,21476
+ vtlengine/Operators/General.py,sha256=BVvBYYWGWFq-eaYoD1pgYMJjILATrX4zpUtqBbxgTw8,7548
+ vtlengine/Operators/HROperators.py,sha256=a_oh6H7E6Ex6t7FX78e--rkXcFcegqxEotzw3LvUuQc,9153
+ vtlengine/Operators/Join.py,sha256=ZtgL_aL9PLjL8V5zM7ZSGQ5bJOQ94TZqAuROvGH82bo,18393
+ vtlengine/Operators/Numeric.py,sha256=icYTWzEsw6VQFLYc5Wucgr8961d8ZwTFx_wfZ8Wp9Co,12083
+ vtlengine/Operators/RoleSetter.py,sha256=mHZIdcHC3wflj81ekLbioDG1f8yHZXYDQFymV-KnyXA,2274
+ vtlengine/Operators/Set.py,sha256=f1uLeY4XZF0cWEwpXRB_CczgbXr6s33DYPuFt39HlEg,7084
+ vtlengine/Operators/String.py,sha256=ghWtYl6oUEAAzynY1a9Hg4yqRA9Sa7uk2B6iF9uuSqQ,20230
+ vtlengine/Operators/Time.py,sha256=wjYAp_4hvqgyK0lvweIZOhzUqI0TLmjcns3iLnN4erQ,43332
+ vtlengine/Operators/Validation.py,sha256=BWHwCOajDnYJVWOkkDsTcjrDqmpKCJ3jykj-rMxfTSU,11259
+ vtlengine/Operators/__init__.py,sha256=ECSr7YL5WUs1ab-KlYwz7nDlnXc2S4eBgiQMPaj-Y68,37835
+ vtlengine/Utils/__Virtual_Assets.py,sha256=0jPXysZrBr0hYVzqFoyg9La8ZcZoZ01Ql245X5vrth4,862
+ vtlengine/Utils/__init__.py,sha256=zhGPJA8MjHmtEEwMS4CxEFYL0tk2L5F0YPn7bitdRzM,8954
+ vtlengine/__extras_check.py,sha256=Wr-lxGZhXJZEacVV5cUkvKt7XM-mry0kYAe3VxNrVcY,614
+ vtlengine/__init__.py,sha256=36roYHxGbmUgbIN4d5zODv-MznsPss-iRKrit7qqcNo,533
+ vtlengine/files/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vtlengine/files/output/__init__.py,sha256=4tmf-p1Y1u5Ohrwt3clQA-FMGaijKI3HC_iwn3H9J8c,1250
+ vtlengine/files/output/_time_period_representation.py,sha256=D5XCSXyEuX_aBzTvBV3sZxACcgwXz2Uu_YH3loMP8q0,1610
+ vtlengine/files/parser/__init__.py,sha256=8or2JoWSDwUwvOEQH6sO4HxD_rrIMwLAAVAeT4kQzm0,9155
+ vtlengine/files/parser/_rfc_dialect.py,sha256=Y8kAYBxH_t9AieN_tYg7QRh5A4DgvabKarx9Ko3QeCQ,462
+ vtlengine/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vtlengine-1.4.0rc2.dist-info/METADATA,sha256=CKSQjSQu5KYK03wZAd4CEYoczggM_P3oVwKcP6Uo3Ts,3909
+ vtlengine-1.4.0rc2.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ vtlengine-1.4.0rc2.dist-info/licenses/LICENSE.md,sha256=2xqHuoHohba7gpcZZKtOICRjzeKsQANXG8WoV9V35KM,33893
+ vtlengine-1.4.0rc2.dist-info/RECORD,,
vtlengine-1.4.0rc2.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: poetry-core 2.2.1
+ Root-Is-Purelib: true
+ Tag: py3-none-any