vtlengine 1.0.3rc3__tar.gz → 1.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of vtlengine might be problematic.
Files changed (59)
  1. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/PKG-INFO +7 -6
  2. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/pyproject.toml +20 -14
  3. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/API/_InternalApi.py +55 -20
  4. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/API/__init__.py +11 -2
  5. vtlengine-1.0.4/src/vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  6. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructor.py +5 -4
  7. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructorModules/Expr.py +47 -48
  8. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py +45 -23
  9. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructorModules/Terminals.py +21 -11
  10. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTEncoders.py +1 -1
  11. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/DAG/__init__.py +0 -3
  12. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/lexer.py +0 -1
  13. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/parser.py +185 -440
  14. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/VtlVisitor.py +0 -1
  15. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/DataTypes/TimeHandling.py +50 -15
  16. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/DataTypes/__init__.py +79 -7
  17. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Exceptions/__init__.py +3 -5
  18. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Exceptions/messages.py +65 -105
  19. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Interpreter/__init__.py +83 -38
  20. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Model/__init__.py +7 -9
  21. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Aggregation.py +13 -7
  22. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Analytic.py +48 -9
  23. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Assignment.py +0 -1
  24. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/CastOperator.py +44 -44
  25. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Clause.py +16 -10
  26. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Comparison.py +20 -12
  27. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Conditional.py +30 -13
  28. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/General.py +9 -4
  29. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/HROperators.py +4 -14
  30. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Join.py +15 -14
  31. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Numeric.py +32 -26
  32. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/RoleSetter.py +6 -2
  33. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Set.py +12 -8
  34. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/String.py +9 -9
  35. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Time.py +136 -116
  36. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Validation.py +10 -4
  37. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/__init__.py +56 -69
  38. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Utils/__init__.py +6 -1
  39. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/output/__init__.py +0 -1
  40. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/output/_time_period_representation.py +2 -1
  41. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/parser/__init__.py +44 -10
  42. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/parser/_rfc_dialect.py +1 -1
  43. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/parser/_time_checking.py +4 -4
  44. vtlengine-1.0.3rc3/src/vtlengine/DataTypes/NumericTypesHandling.py +0 -38
  45. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/LICENSE.md +0 -0
  46. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/README.md +0 -0
  47. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructorModules/__init__.py +0 -0
  48. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTDataExchange.py +0 -0
  49. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTTemplate.py +0 -0
  50. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTVisitor.py +0 -0
  51. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/DAG/_words.py +0 -0
  52. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/Vtl.g4 +0 -0
  53. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/VtlTokens.g4 +0 -0
  54. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/__init__.py +0 -0
  55. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/Grammar/tokens.py +0 -0
  56. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/__init__.py +0 -0
  57. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/Operators/Boolean.py +0 -0
  58. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/__init__.py +0 -0
  59. {vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/files/__init__.py +0 -0
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/PKG-INFO
@@ -1,30 +1,31 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.3
  Name: vtlengine
- Version: 1.0.3rc3
+ Version: 1.0.4
  Summary: Run and Validate VTL Scripts
  License: AGPL-3.0
  Keywords: vtl,sdmx,vtlengine,Validation and Transformation Language
  Author: MeaningfulData
  Author-email: info@meaningfuldata.eu
- Requires-Python: >=3.10,<4.0
+ Requires-Python: >=3.9,<4.0
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: Information Technology
  Classifier: Intended Audience :: Science/Research
  Classifier: License :: OSI Approved :: GNU Affero General Public License v3
  Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Typing :: Typed
  Requires-Dist: antlr4-python3-runtime (==4.9.2)
- Requires-Dist: bottleneck (>=1.3.4,<2.0.0)
- Requires-Dist: duckdb (>=1.1.1,<2.0.0)
+ Requires-Dist: duckdb (>=1.1,<2.0)
+ Requires-Dist: jsonschema (>=4.23.0,<5.0.0)
  Requires-Dist: networkx (>=2.8.8,<3.0.0)
  Requires-Dist: numexpr (>=2.9.0,<3.0.0)
  Requires-Dist: pandas (>=2.1.4,<3.0.0)
- Requires-Dist: s3fs (>=2024.9.0,<2025.0.0)
+ Requires-Dist: s3fs (>=2025.2.0,<2026.0.0)
  Requires-Dist: sqlglot (>=22.2.0,<23.0.0)
  Project-URL: Authors, https://github.com/Meaningful-Data/vtlengine/graphs/contributors
  Project-URL: Documentation, https://docs.vtlengine.meaningfuldata.eu
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "vtlengine"
- version = "1.0.3rc3"
+ version = "1.0.4"
  description = "Run and Validate VTL Scripts"
  authors = ["MeaningfulData <info@meaningfuldata.eu>"]
  license = "AGPL-3.0"
@@ -24,31 +24,31 @@ IssueTracker = 'https://github.com/Meaningful-Data/vtlengine/issues'
  Authors = 'https://github.com/Meaningful-Data/vtlengine/graphs/contributors'

  [tool.poetry.dependencies]
- python = "^3.10"
+ python = "^3.9"
  # PyPi dependencies
- duckdb = "^1.1.1"
+ duckdb = "^1.1"
  #numba = "^0.60.0"
- s3fs = "^2024.9.0"
+ s3fs = "^2025.2.0"
+ jsonschema = "^4.23.0"

  # APT dependencies
  antlr4-python3-runtime = "4.9.2"
  networkx = "^2.8.8"
  numexpr = "^2.9.0"
  pandas = "^2.1.4"
- bottleneck = "^1.3.4"
  sqlglot = "^22.2.0"

- [tool.poetry.dev-dependencies]
- pytest = "^7.3"
- pytest-cov = "^5.0.0"
+ [tool.poetry.group.dev.dependencies]
+ pytest = "^8.3"
+ pytest-cov = "^6.0.0"
  line-profiler-pycharm = "^1.2.0"
- sphinx = "^7.4"
- sphinx-rtd-theme = "^2.0.0"
- mypy = "^1.11.2"
- pandas-stubs = "^2.2.3.241009"
- stubs = "^1.0.0"
+ sphinx = "^7.4.7"
+ sphinx-rtd-theme = "^3.0.2"
+ mypy = "1.14.1"
+ pandas-stubs = "^2.1.4.231227"
  toml = "^0.10.2"
- ruff = "^0.7.1"
+ ruff = "^0.9.4"
+ types-jsonschema = "4.23.0.20241208"

  [tool.ruff]
  line-length = 100
@@ -84,6 +84,12 @@ enable_error_code = [
  ]
  warn_return_any = false

+ [tool.pytest.ini_options]
+ addopts = "--strict-markers"
+ markers = [
+     "input_path: directory where tests data files are stored"
+ ]
+
  [build-system]
  requires = ["poetry-core"]
  build-backend = "poetry.core.masonry.api"
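
The new [tool.pytest.ini_options] table registers the custom input_path marker and enables --strict-markers, so any unregistered marker now fails test collection. A minimal sketch of how a test could consume such a marker; the test function and data directory below are hypothetical, not taken from vtlengine's test suite:

    from pathlib import Path

    import pytest

    # Hypothetical test: reads the directory attached via the registered marker.
    @pytest.mark.input_path(Path(__file__).parent / "data")
    def test_input_directory_exists(request):
        marker = request.node.get_closest_marker("input_path")
        data_dir = marker.args[0]
        assert data_dir.is_dir()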
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/API/_InternalApi.py
@@ -3,12 +3,13 @@ import os
  from pathlib import Path
  from typing import Any, Dict, List, Optional, Union

+ import jsonschema
  import pandas as pd
  from s3fs import S3FileSystem  # type: ignore[import-untyped]

  from vtlengine.AST import PersistentAssignment, Start
  from vtlengine.DataTypes import SCALAR_TYPES
- from vtlengine.Exceptions import check_key
+ from vtlengine.Exceptions import InputValidationException, check_key
  from vtlengine.files.parser import _fill_dataset_empty_data, _validate_pandas
  from vtlengine.Model import (
      Component,
@@ -21,13 +22,9 @@ from vtlengine.Model import (
  )

  base_path = Path(__file__).parent
- filepath_VTL = base_path / "data" / "vtl"
- filepath_ValueDomains = base_path / "data" / "ValueDomain"
- filepath_sql = base_path / "data" / "sql"
- filepath_json = base_path / "data" / "DataStructure" / "input"
- filepath_csv = base_path / "data" / "DataSet" / "input"
- filepath_out_json = base_path / "data" / "DataStructure" / "output"
- filepath_out_csv = base_path / "data" / "DataSet" / "output"
+ schema_path = base_path / "data" / "schema"
+ with open(schema_path / "json_schema_2.1.json", "r") as file:
+     schema = json.load(file)


  def _load_dataset_from_structure(structures: Dict[str, Any]) -> Dict[str, Any]:
@@ -41,22 +38,60 @@ def _load_dataset_from_structure(structures: Dict[str, Any]) -> Dict[str, Any]:
          dataset_name = dataset_json["name"]
          components = {}

-         for component in dataset_json["DataStructure"]:
-             check_key("data_type", SCALAR_TYPES.keys(), component["type"])
-             check_key("role", Role_keys, component["role"])
-             components[component["name"]] = Component(
-                 name=component["name"],
-                 data_type=SCALAR_TYPES[component["type"]],
-                 role=Role(component["role"]),
-                 nullable=component["nullable"],
-             )
+         if "structure" in dataset_json:
+             structure_name = dataset_json["structure"]
+             structure_json = None
+             for s in structures["structures"]:
+                 if s["name"] == structure_name:
+                     structure_json = s
+             if structure_json is None:
+                 raise InputValidationException(code="0-3-1-1", message="Structure not found.")
+             try:
+                 jsonschema.validate(instance=structure_json, schema=schema)
+             except jsonschema.exceptions.ValidationError as e:
+                 raise InputValidationException(code="0-3-1-1", message=e.message)
+
+             for component in structure_json["components"]:
+                 check_key("data_type", SCALAR_TYPES.keys(), component["data_type"])
+                 if component["role"] == "ViralAttribute":
+                     component["role"] = "Attribute"
+
+                 check_key("role", Role_keys, component["role"])
+
+                 if "nullable" not in component:
+                     if Role(component["role"]) == Role.IDENTIFIER:
+                         component["nullable"] = False
+                     elif Role(component["role"]) in (Role.MEASURE, Role.ATTRIBUTE):
+                         component["nullable"] = True
+                     else:
+                         component["nullable"] = False
+
+                 components[component["name"]] = Component(
+                     name=component["name"],
+                     data_type=SCALAR_TYPES[component["data_type"]],
+                     role=Role(component["role"]),
+                     nullable=component["nullable"],
+                 )
+
+         if "DataStructure" in dataset_json:
+             for component in dataset_json["DataStructure"]:
+                 check_key("data_type", SCALAR_TYPES.keys(), component["type"])
+                 check_key("role", Role_keys, component["role"])
+                 components[component["name"]] = Component(
+                     name=component["name"],
+                     data_type=SCALAR_TYPES[component["type"]],
+                     role=Role(component["role"]),
+                     nullable=component["nullable"],
+                 )

          datasets[dataset_name] = Dataset(name=dataset_name, components=components, data=None)
      if "scalars" in structures:
          for scalar_json in structures["scalars"]:
              scalar_name = scalar_json["name"]
              scalar = Scalar(
-                 name=scalar_name, data_type=SCALAR_TYPES[scalar_json["type"]], value=None
+                 name=scalar_name,
+                 data_type=SCALAR_TYPES[scalar_json["type"]],
+                 value=None,
              )
              datasets[scalar_name] = scalar  # type: ignore[assignment]
      return datasets
@@ -115,7 +150,7 @@ def _load_single_datapoint(datapoint: Union[str, Path]) -> Dict[str, Any]:


  def _load_datapoints_path(
-     datapoints: Union[Path, str, List[Union[str, Path]]]
+     datapoints: Union[Path, str, List[Union[str, Path]]],
  ) -> Dict[str, Dataset]:
      """
      Returns a dict with the data given from a Path.
@@ -156,7 +191,7 @@ def _load_datastructure_single(data_structure: Union[Dict[str, Any], Path]) -> D


  def load_datasets(
-     data_structure: Union[Dict[str, Any], Path, List[Union[Dict[str, Any], Path]]]
+     data_structure: Union[Dict[str, Any], Path, List[Union[Dict[str, Any], Path]]],
  ) -> Dict[str, Dataset]:
      """
      Loads multiple datasets.
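
The new branch in _load_dataset_from_structure above accepts the VTL 2.1 metadata layout, where a dataset references a named entry under "structures" and each component carries data_type and role, with nullable optional. A standalone sketch of that input shape and of the role-based nullability defaulting mirrored from the code above (hypothetical names, not vtlengine source):

    # Hypothetical metadata in the new "structures" layout.
    structures = {
        "datasets": [{"name": "DS_1", "structure": "STR_1"}],
        "structures": [
            {
                "name": "STR_1",
                "components": [
                    {"name": "Id_1", "role": "Identifier", "data_type": "String"},
                    {"name": "Me_1", "role": "Measure", "data_type": "Number"},
                ],
            }
        ],
    }

    # Defaulting rule mirrored from the loader: when "nullable" is omitted,
    # Identifiers default to non-nullable, Measures and Attributes to nullable.
    components = structures["structures"][0]["components"]
    for comp in components:
        comp.setdefault("nullable", comp["role"] in ("Measure", "Attribute"))

    print([(c["name"], c["nullable"]) for c in components])
    # [('Id_1', False), ('Me_1', True)]

The loader also maps the "ViralAttribute" role onto Attribute before building Component objects.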
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/API/__init__.py
@@ -32,7 +32,13 @@ class __VTLSingleErrorListener(ErrorListener): # type: ignore[misc]
      """ """

      def syntaxError(
-         self, recognizer: Any, offendingSymbol: str, line: str, column: str, msg: str, e: Any
+         self,
+         recognizer: Any,
+         offendingSymbol: str,
+         line: str,
+         column: str,
+         msg: str,
+         e: Any,
      ) -> None:
          raise Exception(
              f"Not valid VTL Syntax \n "
@@ -150,7 +156,10 @@ def semantic_analysis(

      # Running the interpreter
      interpreter = InterpreterAnalyzer(
-         datasets=structures, value_domains=vd, external_routines=ext_routines, only_semantic=True
+         datasets=structures,
+         value_domains=vd,
+         external_routines=ext_routines,
+         only_semantic=True,
      )
      with pd.option_context("future.no_silent_downcasting", True):
          result = interpreter.visit(ast)
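
As an aside, future.no_silent_downcasting is a pandas 2.2+ option: inside the option_context above, object-dtype results are no longer silently downcast during the interpreter run. A tiny standalone illustration of the flag itself (plain pandas, not vtlengine code):

    import pandas as pd

    s = pd.Series([True, False, None], dtype="object")

    with pd.option_context("future.no_silent_downcasting", True):
        filled = s.fillna(False)

    # With the option enabled, the result keeps the object dtype instead of
    # being silently downcast to bool.
    print(filled.dtype)  # object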
vtlengine-1.0.4/src/vtlengine/API/data/schema/json_schema_2.1.json
@@ -0,0 +1,116 @@
+ {
+   "$schema": "https://json-schema.org/draft/2020-12/schema",
+   "description": "VTL Metadata JSON serialization",
+   "$defs": {
+     "vtl-id": {
+       "type": "string",
+       "pattern": "^[a-zA-Z][a-zA-Z0-9_]*$|^'.*'$"
+     },
+     "set-type": {
+       "type": "array",
+       "uniqueItems": true,
+       "oneOf": [
+         { "items": { "oneOf": [ { "type": "string" }, { "type": "null" } ] } },
+         { "items": { "oneOf": [ { "type": "number" }, { "type": "null" } ] } }
+       ]
+     },
+     "identifiable": {
+       "type": "object",
+       "properties": {
+         "name": { "$ref": "#/$defs/vtl-id" },
+         "description": { "type": "string" }
+       },
+       "required": [ "name" ]
+     }
+   },
+   "type": "object",
+   "properties": {
+     "datasets": {
+       "type": "array",
+       "items": {
+         "allOf": [ { "$ref": "#/$defs/identifiable" } ],
+         "properties": {
+           "source": { "type": "string" },
+           "structure": { "$ref": "#/$defs/vtl-id" }
+         },
+         "required": [ "structure" ]
+       }
+     },
+     "structures": {
+       "type": "array",
+       "items": {
+         "allOf": [ { "$ref": "#/$defs/identifiable" } ],
+         "properties": {
+           "components": {
+             "type": "array",
+             "items": {
+               "allOf": [ { "$ref": "#/$defs/identifiable" } ],
+               "properties": {
+                 "role": {
+                   "type": "string",
+                   "enum": [ "Identifier", "Measure", "Attribute", "Viral Attribute" ]
+                 },
+                 "subset": { "$ref": "#/$defs/vtl-id" },
+                 "nullable": { "type": "boolean" },
+                 "data_type": {
+                   "type": "string",
+                   "enum": [ "String", "Number", "Integer", "Boolean", "Time", "TimePeriod", "Date", "Duration" ]
+                 }
+               },
+               "required": [ "role" ]
+             }
+           }
+         },
+         "required": [ "components" ]
+       }
+     },
+     "variables": {
+       "type": "array",
+       "items": {
+         "allOf": [ { "$ref": "#/$defs/identifiable" } ],
+         "properties": {
+           "domain": { "$ref": "#/$defs/vtl-id" }
+         },
+         "required": [ "domain" ]
+       }
+     },
+     "domains": {
+       "type": "array",
+       "items": {
+         "allOf": [ { "$ref": "#/$defs/identifiable" } ],
+         "unevaluatedProperties": false,
+         "oneOf": [
+           {
+             "properties": {
+               "externalRef": { "type": "string" }
+             },
+             "required": [ "externalRef" ]
+           }, {
+             "properties": {
+               "parent": { "$ref": "#/$defs/vtl-id" }
+             },
+             "required": [ "parent" ],
+             "oneOf": [{
+               "properties": {
+                 "restriction": { "$ref": "#/$defs/set-type" }
+               },
+               "required": [ "restriction" ]
+             }, {
+               "properties": {
+                 "enumerated": { "$ref": "#/$defs/set-type" }
+               },
+               "required": [ "enumerated" ]
+             }, {
+               "properties": {
+                 "described": { "type": "string" }
+               },
+               "required": [ "described" ]
+             }
+             ]
+           }
+         ]
+       }
+     }
+   }
+ }
+
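
The schema describes the VTL 2.1 metadata JSON layout: optional top-level arrays of datasets, structures, variables and domains, with component roles and data types constrained by enums. A short sketch of the validation it enables, using the real jsonschema API; the relative file path and the example structures are assumptions for illustration, not vtlengine code:

    import json
    from pathlib import Path

    import jsonschema

    # Assumption: run from a checkout where the schema sits at this path.
    schema = json.loads(
        Path("src/vtlengine/API/data/schema/json_schema_2.1.json").read_text()
    )

    good = {"name": "STR_1", "components": [{"name": "Id_1", "role": "Identifier"}]}
    bad = {"name": "STR_1", "components": [{"name": "Id_1", "role": "Dimension"}]}

    jsonschema.validate(instance={"structures": [good]}, schema=schema)  # passes

    try:
        jsonschema.validate(instance={"structures": [bad]}, schema=schema)
    except jsonschema.exceptions.ValidationError as err:
        print(err.message)  # 'Dimension' is not one of the allowed role values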
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructor.py
@@ -382,7 +382,8 @@ class ASTVisitor(VtlVisitor):
          if conditions:
              identifiers_list = [
                  DefIdentifier(
-                     value=elto.alias if getattr(elto, "alias", None) else elto.value, kind=kind
+                     value=elto.alias if getattr(elto, "alias", None) else elto.value,
+                     kind=kind,
                  )
                  for elto in conditions[0]
              ]
@@ -395,7 +396,7 @@ class ASTVisitor(VtlVisitor):
      def visitValueDomainSignature(self, ctx: Parser.ValueDomainSignatureContext):
          """
          valueDomainSignature: CONDITION IDENTIFIER (AS IDENTIFIER)? (',' IDENTIFIER (AS IDENTIFIER)?)* ;
-         """ # noqa E501
+         """  # noqa E501
          # AST_ASTCONSTRUCTOR.7
          ctx_list = list(ctx.getChildren())
          component_nodes = [
@@ -459,7 +460,7 @@ class ASTVisitor(VtlVisitor):
          codeItemRelation: ( WHEN expr THEN )? codeItemRef codeItemRelationClause (codeItemRelationClause)* ;
          ( WHEN exprComponent THEN )? codetemRef=valueDomainValue comparisonOperand? codeItemRelationClause (codeItemRelationClause)*

-         """ # noqa E501
+         """  # noqa E501

          ctx_list = list(ctx.getChildren())

@@ -512,7 +513,7 @@ class ASTVisitor(VtlVisitor):
      def visitCodeItemRelationClause(self, ctx: Parser.CodeItemRelationClauseContext):
          """
          (opAdd=( PLUS | MINUS ))? rightCodeItem=valueDomainValue ( QLPAREN rightCondition=exprComponent QRPAREN )?
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())

          expr = [expr for expr in ctx_list if isinstance(expr, Parser.ExprContext)]
{vtlengine-1.0.3rc3 → vtlengine-1.0.4}/src/vtlengine/AST/ASTConstructorModules/Expr.py
@@ -45,7 +45,8 @@ class Expr(VtlVisitor):

      Expr Definition.

-     _______________________________________________________________________________________"""
+     _______________________________________________________________________________________
+     """

      def visitExpr(self, ctx: Parser.ExprContext):
          """
@@ -66,7 +67,7 @@ class Expr(VtlVisitor):
          | constant # constantExpr
          | varID # varIdExpr
          ;
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]

@@ -121,7 +122,6 @@ class Expr(VtlVisitor):

          # CASE WHEN expr THEN expr ELSE expr END # caseExpr
          elif isinstance(c, TerminalNodeImpl) and (c.getSymbol().type == Parser.CASE):
-
              if len(ctx_list) % 4 != 3:
                  raise ValueError("Syntax error.")

@@ -221,7 +221,6 @@ class Expr(VtlVisitor):
          return previous_node

      def visitClauseExpr(self, ctx: Parser.ClauseExprContext):
-
          ctx_list = list(ctx.getChildren())
          dataset = self.visitExpr(ctx_list[0])

@@ -347,7 +346,7 @@ class Expr(VtlVisitor):

      def visitJoinClause(self, ctx: Parser.JoinClauseContext):
          """
-         joinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)?
+         JoinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)?
          """
          ctx_list = list(ctx.getChildren())

@@ -373,7 +372,7 @@ class Expr(VtlVisitor):
      def visitJoinClauseWithoutUsing(self, ctx: Parser.JoinClauseWithoutUsingContext):
          """
          joinClause: joinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)? ;
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())

          clause_nodes = []
@@ -388,7 +387,7 @@ class Expr(VtlVisitor):
      def visitJoinBody(self, ctx: Parser.JoinBodyContext):
          """
          joinBody: filterClause? (calcClause|joinApplyClause|aggrClause)? (keepOrDropClause)? renameClause?
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())

          body_nodes = []
@@ -457,7 +456,7 @@ class Expr(VtlVisitor):
      def visitEvalAtom(self, ctx: Parser.EvalAtomContext):
          """
          | EVAL LPAREN routineName LPAREN (varID|scalarItem)? (COMMA (varID|scalarItem))* RPAREN (LANGUAGE STRING_CONSTANT)? (RETURNS evalDatasetType)? RPAREN # evalAtom
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())

          routine_name = Terminals().visitRoutineName(ctx_list[2])
@@ -505,7 +504,7 @@ class Expr(VtlVisitor):
      def visitCastExprDataset(self, ctx: Parser.CastExprDatasetContext):
          """
          | CAST LPAREN expr COMMA (basicScalarType|valueDomainName) (COMMA STRING_CONSTANT)? RPAREN # castExprDataset
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]

@@ -795,15 +794,19 @@ class Expr(VtlVisitor):
              return self.visitTimeDiffAtom(ctx)
          elif isinstance(ctx, Parser.DateAddAtomContext):
              return self.visitTimeAddAtom(ctx)
-         elif isinstance(ctx, (Parser.YearAtomContext,
-                               Parser.MonthAtomContext,
-                               Parser.DayOfMonthAtomContext,
-                               Parser.DayOfYearAtomContext,
-                               Parser.DayToYearAtomContext,
-                               Parser.DayToMonthAtomContext,
-                               Parser.YearTodayAtomContext,
-                               Parser.MonthTodayAtomContext)):
-
+         elif isinstance(
+             ctx,
+             (
+                 Parser.YearAtomContext,
+                 Parser.MonthAtomContext,
+                 Parser.DayOfMonthAtomContext,
+                 Parser.DayOfYearAtomContext,
+                 Parser.DayToYearAtomContext,
+                 Parser.DayToMonthAtomContext,
+                 Parser.YearTodayAtomContext,
+                 Parser.MonthTodayAtomContext,
+             ),
+         ):
              return self.visitTimeUnaryAtom(ctx)
          else:
              raise NotImplementedError
@@ -825,26 +828,6 @@ class Expr(VtlVisitor):

          return UnaryOp(op=op, operand=operand_node[0])

-     # def visitPeriodAtom(self, ctx: Parser.PeriodAtomContext):
-     #     """
-     #     periodExpr: PERIOD_INDICATOR '(' expr? ')' ;
-     #     """
-     #     ctx_list = list(ctx.getChildren())
-     #     c = ctx_list[0]
-     #
-     #     op = c.getSymbol().text
-     #     operand_node = [
-     #         self.visitExpr(operand)
-     #         for operand in ctx_list
-     #         if isinstance(operand, Parser.ExprContext)
-     #     ]
-     #
-     #     if len(operand_node) == 0:
-     #         # AST_ASTCONSTRUCTOR.15
-     #         raise NotImplementedError
-     #
-     #     return UnaryOp(op=op, operand=operand_node[0])
-
      def visitTimeShiftAtom(self, ctx: Parser.TimeShiftAtomContext):
          """
          timeShiftExpr: TIMESHIFT '(' expr ',' INTEGER_CONSTANT ')' ;
@@ -878,7 +861,7 @@ class Expr(VtlVisitor):
      def visitTimeAggAtom(self, ctx: Parser.TimeAggAtomContext):
          """
          TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExpr)? (COMMA (FIRST|LAST))? RPAREN # timeAggAtom
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]

@@ -911,7 +894,11 @@ class Expr(VtlVisitor):
              # AST_ASTCONSTRUCTOR.17
              raise Exception("Optional as expression node is not allowed in Time Aggregation")
          return TimeAggregation(
-             op=op, operand=operand_node, period_to=period_to, period_from=period_from, conf=conf
+             op=op,
+             operand=operand_node,
+             period_to=period_to,
+             period_from=period_from,
+             conf=conf,
          )

      def visitFlowAtom(self, ctx: Parser.FlowAtomContext):
@@ -988,7 +975,7 @@ class Expr(VtlVisitor):
          setExpr: UNION LPAREN left=expr (COMMA expr)+ RPAREN # unionAtom
          | INTERSECT LPAREN left=expr (COMMA expr)+ RPAREN # intersectAtom
          | op=(SETDIFF|SYMDIFF) LPAREN left=expr COMMA right=expr RPAREN # setOrSYmDiffAtom
-         """ # noqa E501
+         """  # noqa E501
          if isinstance(ctx, Parser.UnionAtomContext):
              return self.visitUnionAtom(ctx)
          elif isinstance(ctx, Parser.IntersectAtomContext):
@@ -1031,7 +1018,7 @@ class Expr(VtlVisitor):
      def visitHierarchyFunctions(self, ctx: Parser.HierarchyFunctionsContext):
          """
          HIERARCHY LPAREN op=expr COMMA hrName=IDENTIFIER (conditionClause)? (RULE ruleComponent=componentID)? (validationMode)? (inputModeHierarchy)? outputModeHierarchy? RPAREN
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]

@@ -1102,7 +1089,7 @@ class Expr(VtlVisitor):
      def visitValidateDPruleset(self, ctx: Parser.ValidateDPrulesetContext):
          """
          validationDatapoint: CHECK_DATAPOINT '(' expr ',' IDENTIFIER (COMPONENTS componentID (',' componentID)*)? (INVALID|ALL_MEASURES|ALL)? ')' ;
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]

@@ -1137,7 +1124,7 @@ class Expr(VtlVisitor):
      def visitValidateHRruleset(self, ctx: Parser.ValidateHRrulesetContext):
          """
          CHECK_HIERARCHY LPAREN op=expr COMMA hrName=IDENTIFIER conditionClause? (RULE componentID)? validationMode? inputMode? validationOutput? RPAREN # validateHRruleset
-         """ # noqa E501
+         """  # noqa E501

          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]
@@ -1199,7 +1186,7 @@ class Expr(VtlVisitor):
      def visitValidationSimple(self, ctx: Parser.ValidationSimpleContext):
          """
          | CHECK LPAREN op=expr (codeErr=erCode)? (levelCode=erLevel)? imbalanceExpr? output=(INVALID|ALL)? RPAREN # validationSimple
-         """ # noqa E501
+         """  # noqa E501
          ctx_list = list(ctx.getChildren())
          c = ctx_list[0]
          token = c.getSymbol()
@@ -1331,11 +1318,19 @@ class Expr(VtlVisitor):

          if window is None:
              window = Windowing(
-                 type_="data", start=-1, stop=0, start_mode="preceding", stop_mode="current"
+                 type_="data",
+                 start=-1,
+                 stop=0,
+                 start_mode="preceding",
+                 stop_mode="current",
              )

          return Analytic(
-             op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, window=window
+             op=op_node,
+             operand=operand,
+             partition_by=partition_by,
+             order_by=order_by,
+             window=window,
          )

      def visitLagOrLeadAn(self, ctx: Parser.LagOrLeadAnContext):
@@ -1369,7 +1364,11 @@ class Expr(VtlVisitor):
              raise Exception(f"{op_node} requires an offset parameter.")

          return Analytic(
-             op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, params=params
+             op=op_node,
+             operand=operand,
+             partition_by=partition_by,
+             order_by=order_by,
+             params=params,
          )

      def visitRatioToReportAn(self, ctx: Parser.RatioToReportAnContext):