vtlengine 1.4.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. vtlengine/API/_InternalApi.py +791 -0
  2. vtlengine/API/__init__.py +612 -0
  3. vtlengine/API/data/schema/external_routines_schema.json +34 -0
  4. vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  5. vtlengine/API/data/schema/value_domain_schema.json +97 -0
  6. vtlengine/AST/ASTComment.py +57 -0
  7. vtlengine/AST/ASTConstructor.py +598 -0
  8. vtlengine/AST/ASTConstructorModules/Expr.py +1928 -0
  9. vtlengine/AST/ASTConstructorModules/ExprComponents.py +995 -0
  10. vtlengine/AST/ASTConstructorModules/Terminals.py +790 -0
  11. vtlengine/AST/ASTConstructorModules/__init__.py +50 -0
  12. vtlengine/AST/ASTDataExchange.py +10 -0
  13. vtlengine/AST/ASTEncoders.py +32 -0
  14. vtlengine/AST/ASTString.py +675 -0
  15. vtlengine/AST/ASTTemplate.py +558 -0
  16. vtlengine/AST/ASTVisitor.py +25 -0
  17. vtlengine/AST/DAG/__init__.py +479 -0
  18. vtlengine/AST/DAG/_words.py +10 -0
  19. vtlengine/AST/Grammar/Vtl.g4 +705 -0
  20. vtlengine/AST/Grammar/VtlTokens.g4 +409 -0
  21. vtlengine/AST/Grammar/__init__.py +0 -0
  22. vtlengine/AST/Grammar/lexer.py +2139 -0
  23. vtlengine/AST/Grammar/parser.py +16597 -0
  24. vtlengine/AST/Grammar/tokens.py +169 -0
  25. vtlengine/AST/VtlVisitor.py +824 -0
  26. vtlengine/AST/__init__.py +674 -0
  27. vtlengine/DataTypes/TimeHandling.py +562 -0
  28. vtlengine/DataTypes/__init__.py +863 -0
  29. vtlengine/DataTypes/_time_checking.py +135 -0
  30. vtlengine/Exceptions/__exception_file_generator.py +96 -0
  31. vtlengine/Exceptions/__init__.py +159 -0
  32. vtlengine/Exceptions/messages.py +1004 -0
  33. vtlengine/Interpreter/__init__.py +2048 -0
  34. vtlengine/Model/__init__.py +501 -0
  35. vtlengine/Operators/Aggregation.py +357 -0
  36. vtlengine/Operators/Analytic.py +455 -0
  37. vtlengine/Operators/Assignment.py +23 -0
  38. vtlengine/Operators/Boolean.py +106 -0
  39. vtlengine/Operators/CastOperator.py +451 -0
  40. vtlengine/Operators/Clause.py +366 -0
  41. vtlengine/Operators/Comparison.py +488 -0
  42. vtlengine/Operators/Conditional.py +495 -0
  43. vtlengine/Operators/General.py +191 -0
  44. vtlengine/Operators/HROperators.py +254 -0
  45. vtlengine/Operators/Join.py +447 -0
  46. vtlengine/Operators/Numeric.py +422 -0
  47. vtlengine/Operators/RoleSetter.py +77 -0
  48. vtlengine/Operators/Set.py +176 -0
  49. vtlengine/Operators/String.py +578 -0
  50. vtlengine/Operators/Time.py +1144 -0
  51. vtlengine/Operators/Validation.py +275 -0
  52. vtlengine/Operators/__init__.py +900 -0
  53. vtlengine/Utils/__Virtual_Assets.py +34 -0
  54. vtlengine/Utils/__init__.py +479 -0
  55. vtlengine/__extras_check.py +17 -0
  56. vtlengine/__init__.py +27 -0
  57. vtlengine/files/__init__.py +0 -0
  58. vtlengine/files/output/__init__.py +35 -0
  59. vtlengine/files/output/_time_period_representation.py +55 -0
  60. vtlengine/files/parser/__init__.py +240 -0
  61. vtlengine/files/parser/_rfc_dialect.py +22 -0
  62. vtlengine/py.typed +0 -0
  63. vtlengine-1.4.0rc2.dist-info/METADATA +89 -0
  64. vtlengine-1.4.0rc2.dist-info/RECORD +66 -0
  65. vtlengine-1.4.0rc2.dist-info/WHEEL +4 -0
  66. vtlengine-1.4.0rc2.dist-info/licenses/LICENSE.md +661 -0
@@ -0,0 +1,50 @@
1
+ from typing import Dict, Union
2
+
3
+ from antlr4.ParserRuleContext import ParserRuleContext
4
+ from antlr4.Token import CommonToken
5
+
6
+ from vtlengine.AST.Grammar.lexer import Lexer
7
+
8
+
9
def extract_token_info(token: Union[CommonToken, ParserRuleContext]) -> Dict[str, int]:
    """
    Extract positional information from a token or ParserRuleContext.

    The returned mapping describes the span of source text covered by the
    given element (including children, for a ParserRuleContext):

    - column_start: column where the element's text begins.
    - column_stop: column just past the end of the element's text.
    - line_start: line number where the element begins.
    - line_stop: line number where the element ends.

    Important Note: the keys of the dict are the same as the class attributes
    of the AST Object, so the result can be splatted into AST constructors.

    Args:
        token (Union[CommonToken, ParserRuleContext]): The token or
            ParserRuleContext to extract information from.

    Returns:
        Dict[str, int]: A dictionary containing the token information.
    """
    if isinstance(token, ParserRuleContext):
        start, stop = token.start, token.stop
        return {
            "column_start": start.column,
            "column_stop": stop.column + len(stop.text),
            "line_start": start.line,
            "line_stop": stop.line,
        }

    # Plain token: it spans a single line, except for block comments.
    # The ML_COMMENT grammar rule does not include the final \n, so every
    # newline inside the comment text adds one line to the stop line.
    extra_lines = token.text.count("\n") if token.type == Lexer.ML_COMMENT else 0
    return {
        "column_start": token.column,
        "column_stop": token.column + len(token.text),
        "line_start": token.line,
        "line_stop": token.line + extra_lines,
    }
@@ -0,0 +1,10 @@
1
+ """
2
+ AST.ASTDataExchange.py
3
+ =================
4
+
5
+ Description
6
+ -----------
7
+ Used to rewrite an operator across AST.
8
+ """
9
+
10
+ de_ruleset_elements = {}
@@ -0,0 +1,32 @@
1
+ import json
2
+
3
+ from vtlengine import AST
4
+ from vtlengine.Model import Dataset
5
+
6
+
7
class ComplexEncoder(json.JSONEncoder):
    """
    JSON encoder for AST objects.

    Objects exposing a ``toJSON`` method are serialized through that method.
    ``Dataset`` instances are replaced by the placeholder string ``"dataset"``
    (serializing them directly causes a circular reference error). Any other
    unsupported type is delegated to the base class, which raises ``TypeError``
    as the ``json.JSONEncoder`` contract requires.
    """

    def default(self, obj):
        """Return a JSON-serializable representation of *obj*."""
        if hasattr(obj, "toJSON"):
            return obj.toJSON()
        # Makes a circular reference error if we do not check for this
        if isinstance(obj, Dataset):
            return "dataset"
        # Bug fix: the previous fallback returned ``json.__dict__`` (the json
        # module's attribute dictionary), silently emitting garbage for any
        # unsupported type instead of signaling the error.
        return super().default(obj)
16
+
17
+
18
class ComplexDecoder(json.JSONDecoder):
    """
    JSON decoder hook that rebuilds AST objects from dictionaries.

    A dictionary carrying a ``"class_name"`` key is turned into an instance of
    the correspondingly named class from :mod:`vtlengine.AST`; all other
    dictionaries are returned unchanged.

    NOTE(review): this subclass does not wire ``object_hook`` into
    ``JSONDecoder.__init__``, so callers must pass
    ``object_hook=ComplexDecoder.object_hook`` explicitly — confirm whether
    that is the intended usage.
    """

    @staticmethod
    def object_hook(dictionary):
        """Convert *dictionary* to an AST instance when it names an AST class.

        Raises:
            ValueError: if ``class_name`` does not exist in the AST module.
            TypeError: if the remaining keys do not match the class constructor.
        """
        # Guard clause: plain dictionaries pass through untouched.
        if "class_name" not in dictionary:
            return dictionary

        if not hasattr(AST, dictionary["class_name"]):
            raise ValueError(f"Class {dictionary['class_name']} not found in AST")

        ast_class = getattr(AST, dictionary["class_name"])
        del dictionary["class_name"]
        # A TypeError here means the serialized fields do not match the AST
        # class constructor; let it propagate. (The previous
        # ``except TypeError as e: raise e`` re-raised it unchanged — a no-op.)
        return ast_class(**dictionary)