vtlengine 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vtlengine has been flagged as potentially problematic; review the advisory details before upgrading.

Files changed (50):
  1. vtlengine/API/_InternalApi.py +19 -8
  2. vtlengine/API/__init__.py +9 -9
  3. vtlengine/AST/ASTConstructor.py +23 -43
  4. vtlengine/AST/ASTConstructorModules/Expr.py +147 -71
  5. vtlengine/AST/ASTConstructorModules/ExprComponents.py +104 -40
  6. vtlengine/AST/ASTConstructorModules/Terminals.py +28 -39
  7. vtlengine/AST/ASTTemplate.py +16 -1
  8. vtlengine/AST/DAG/__init__.py +12 -15
  9. vtlengine/AST/Grammar/Vtl.g4 +49 -20
  10. vtlengine/AST/Grammar/VtlTokens.g4 +13 -1
  11. vtlengine/AST/Grammar/lexer.py +1293 -1183
  12. vtlengine/AST/Grammar/parser.py +5758 -3939
  13. vtlengine/AST/Grammar/tokens.py +12 -0
  14. vtlengine/AST/VtlVisitor.py +9 -2
  15. vtlengine/AST/__init__.py +21 -3
  16. vtlengine/DataTypes/TimeHandling.py +12 -7
  17. vtlengine/DataTypes/__init__.py +17 -24
  18. vtlengine/Exceptions/__init__.py +43 -1
  19. vtlengine/Exceptions/messages.py +82 -62
  20. vtlengine/Interpreter/__init__.py +125 -120
  21. vtlengine/Model/__init__.py +17 -12
  22. vtlengine/Operators/Aggregation.py +14 -14
  23. vtlengine/Operators/Analytic.py +56 -31
  24. vtlengine/Operators/Assignment.py +2 -3
  25. vtlengine/Operators/Boolean.py +5 -7
  26. vtlengine/Operators/CastOperator.py +12 -13
  27. vtlengine/Operators/Clause.py +11 -13
  28. vtlengine/Operators/Comparison.py +31 -17
  29. vtlengine/Operators/Conditional.py +157 -17
  30. vtlengine/Operators/General.py +4 -4
  31. vtlengine/Operators/HROperators.py +41 -34
  32. vtlengine/Operators/Join.py +18 -22
  33. vtlengine/Operators/Numeric.py +76 -39
  34. vtlengine/Operators/RoleSetter.py +6 -8
  35. vtlengine/Operators/Set.py +7 -12
  36. vtlengine/Operators/String.py +19 -27
  37. vtlengine/Operators/Time.py +366 -43
  38. vtlengine/Operators/Validation.py +4 -7
  39. vtlengine/Operators/__init__.py +38 -41
  40. vtlengine/Utils/__init__.py +149 -94
  41. vtlengine/__init__.py +1 -1
  42. vtlengine/files/output/__init__.py +2 -2
  43. vtlengine/files/output/_time_period_representation.py +0 -1
  44. vtlengine/files/parser/__init__.py +18 -18
  45. vtlengine/files/parser/_time_checking.py +3 -2
  46. {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/METADATA +17 -5
  47. vtlengine-1.0.3.dist-info/RECORD +58 -0
  48. vtlengine-1.0.1.dist-info/RECORD +0 -58
  49. {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/LICENSE.md +0 -0
  50. {vtlengine-1.0.1.dist-info → vtlengine-1.0.3.dist-info}/WHEEL +0 -0
@@ -1,22 +1,45 @@
1
1
  import re
2
- import pandas as pd
2
+ from datetime import date, datetime, timedelta
3
+ from typing import Any, Dict, List, Optional, Type, Union
3
4
 
4
- from datetime import date
5
- from typing import Optional, Union, List, Any, Dict, Type
5
+ import pandas as pd
6
6
 
7
7
  import vtlengine.Operators as Operators
8
- from vtlengine.DataTypes import Date, TimePeriod, TimeInterval, Duration, ScalarType
9
- from vtlengine.DataTypes.TimeHandling import DURATION_MAPPING, date_to_period, TimePeriodHandler
10
-
11
8
  from vtlengine.AST.Grammar.tokens import (
12
- TIME_AGG,
13
- TIMESHIFT,
14
- PERIOD_INDICATOR,
9
+ DATE_ADD,
10
+ DATEDIFF,
11
+ DAYOFMONTH,
12
+ DAYOFYEAR,
13
+ DAYTOMONTH,
14
+ DAYTOYEAR,
15
15
  FILL_TIME_SERIES,
16
16
  FLOW_TO_STOCK,
17
+ MONTH,
18
+ MONTHTODAY,
19
+ PERIOD_INDICATOR,
20
+ TIME_AGG,
21
+ TIMESHIFT,
22
+ YEAR,
23
+ YEARTODAY,
24
+ )
25
+ from vtlengine.DataTypes import (
26
+ Date,
27
+ Duration,
28
+ Integer,
29
+ ScalarType,
30
+ String,
31
+ TimeInterval,
32
+ TimePeriod,
33
+ unary_implicit_promotion,
34
+ )
35
+ from vtlengine.DataTypes.TimeHandling import (
36
+ DURATION_MAPPING,
37
+ TimePeriodHandler,
38
+ date_to_period,
39
+ period_to_date,
17
40
  )
18
41
  from vtlengine.Exceptions import SemanticError
19
- from vtlengine.Model import Dataset, DataComponent, Scalar, Component, Role
42
+ from vtlengine.Model import Component, DataComponent, Dataset, Role, Scalar
20
43
 
21
44
 
22
45
  class Time(Operators.Operator):
@@ -121,7 +144,7 @@ class Unary(Time):
121
144
  result.data = result.data.sort_values(by=cls.other_ids + [cls.time_id])
122
145
  if data_type == TimePeriod:
123
146
  result.data = cls._period_accumulation(result.data, measure_names)
124
- elif data_type == Date or data_type == TimeInterval:
147
+ elif data_type in (Date, TimeInterval):
125
148
  result.data[measure_names] = (
126
149
  result.data.groupby(cls.other_ids)[measure_names]
127
150
  .apply(cls.py_op)
@@ -148,6 +171,10 @@ class Binary(Time):
148
171
  pass
149
172
 
150
173
 
174
class Parameterized(Time):
    # Empty placeholder for parameterized time operators.
    # NOTE(review): a separately defined `Parametrized` class (different spelling) also
    # exists in this module and serves the same role — confirm both names are intended.
    pass
176
+
177
+
151
178
  class Period_indicator(Unary):
152
179
  op = PERIOD_INDICATOR
153
180
 
@@ -175,7 +202,7 @@ class Period_indicator(Unary):
175
202
 
176
203
  @classmethod
177
204
  def evaluate(
178
- cls, operand: Union[Dataset, DataComponent, Scalar, str]
205
+ cls, operand: Union[Dataset, DataComponent, Scalar, str]
179
206
  ) -> Union[Dataset, DataComponent, Scalar, str]:
180
207
  result = cls.validate(operand)
181
208
  if isinstance(operand, str):
@@ -198,6 +225,17 @@ class Period_indicator(Unary):
198
225
  return result
199
226
 
200
227
 
228
class Parametrized(Time):
    """Base class for time operators that take an operand plus extra parameters.

    Subclasses (e.g. ``Date_Add``) override both hooks; these defaults do nothing.
    """

    @classmethod
    def validate(cls, operand: Any, param: Any) -> Any:
        # Intentionally a no-op; concrete operators supply their own validation.
        pass

    @classmethod
    def evaluate(cls, operand: Any, param: Any) -> Any:
        # Intentionally a no-op; concrete operators supply their own evaluation.
        pass
237
+
238
+
201
239
  class Flow_to_stock(Unary):
202
240
 
203
241
  @classmethod
@@ -347,7 +385,7 @@ class Fill_time_series(Binary):
347
385
 
348
386
  @classmethod
349
387
  def fill_periods_rows(
350
- cls, group_df: Any, period: str, years: List[int], vals: Optional[List[int]] = None
388
+ cls, group_df: Any, period: str, years: List[int], vals: Optional[List[int]] = None
351
389
  ) -> List[Any]:
352
390
  rows = []
353
391
  for year in years:
@@ -360,7 +398,7 @@ class Fill_time_series(Binary):
360
398
 
361
399
  @classmethod
362
400
  def create_period_row(
363
- cls, group_df: Any, period: str, year: int, val: Optional[int] = None
401
+ cls, group_df: Any, period: str, year: int, val: Optional[int] = None
364
402
  ) -> Any:
365
403
  row = group_df.iloc[0].copy()
366
404
  row[cls.time_id] = f"{year}" if period == "A" else f"{year}-{period}{val:d}"
@@ -399,7 +437,7 @@ class Fill_time_series(Binary):
399
437
  filled_data = []
400
438
 
401
439
  def create_filled_dates(
402
- group: Any, min_max: Dict[str, Any]
440
+ group: Any, min_max: Dict[str, Any]
403
441
  ) -> (pd.DataFrame, str): # type: ignore[syntax]
404
442
  date_range = pd.date_range(start=min_max["min"], end=min_max["max"], freq=min_frequency)
405
443
  date_df = pd.DataFrame(date_range, columns=[cls.time_id])
@@ -442,7 +480,7 @@ class Fill_time_series(Binary):
442
480
 
443
481
  @classmethod
444
482
  def fill_time_intervals(
445
- cls, data: pd.DataFrame, fill_type: str, frequency: str
483
+ cls, data: pd.DataFrame, fill_type: str, frequency: str
446
484
  ) -> pd.DataFrame:
447
485
  result_data = cls.time_filler(data, fill_type, frequency)
448
486
  not_na = result_data[cls.measures].notna().any(axis=1)
@@ -550,7 +588,7 @@ class Time_Shift(Binary):
550
588
 
551
589
  @classmethod
552
590
  def shift_period(
553
- cls, period_str: str, shift_value: int, frequency: Optional[int] = None
591
+ cls, period_str: str, shift_value: int, frequency: Optional[int] = None
554
592
  ) -> str:
555
593
  period_type = cls._get_period(period_str)
556
594
 
@@ -604,7 +642,7 @@ class Time_Aggregation(Time):
604
642
 
605
643
  @classmethod
606
644
  def dataset_validation(
607
- cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
645
+ cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
608
646
  ) -> Dataset:
609
647
  # TODO: Review with VTL TF as this makes no sense
610
648
 
@@ -641,7 +679,7 @@ class Time_Aggregation(Time):
641
679
 
642
680
  @classmethod
643
681
  def component_validation(
644
- cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
682
+ cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
645
683
  ) -> DataComponent:
646
684
  if operand.data_type not in cls.TIME_DATA_TYPES:
647
685
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time component")
@@ -654,7 +692,7 @@ class Time_Aggregation(Time):
654
692
 
655
693
  @classmethod
656
694
  def scalar_validation(
657
- cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
695
+ cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
658
696
  ) -> Scalar:
659
697
  if operand.data_type not in cls.TIME_DATA_TYPES:
660
698
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time scalar")
@@ -663,21 +701,18 @@ class Time_Aggregation(Time):
663
701
 
664
702
  @classmethod
665
703
  def _execute_time_aggregation(
666
- cls,
667
- value: str,
668
- data_type: Type[ScalarType],
669
- period_from: Optional[str],
670
- period_to: str,
671
- conf: str,
704
+ cls,
705
+ value: str,
706
+ data_type: Type[ScalarType],
707
+ period_from: Optional[str],
708
+ period_to: str,
709
+ conf: str,
672
710
  ) -> str:
673
711
  if data_type == TimePeriod: # Time period
674
712
  return _time_period_access(value, period_to)
675
713
 
676
714
  elif data_type == Date:
677
- if conf == "first":
678
- start = True
679
- else:
680
- start = False
715
+ start = conf == "first"
681
716
  # Date
682
717
  if period_to == "D":
683
718
  return value
@@ -687,7 +722,7 @@ class Time_Aggregation(Time):
687
722
 
688
723
  @classmethod
689
724
  def dataset_evaluation(
690
- cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
725
+ cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
691
726
  ) -> Dataset:
692
727
  result = cls.dataset_validation(operand, period_from, period_to, conf)
693
728
  result.data = operand.data.copy() if operand.data is not None else pd.DataFrame()
@@ -703,7 +738,7 @@ class Time_Aggregation(Time):
703
738
 
704
739
  @classmethod
705
740
  def component_evaluation(
706
- cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
741
+ cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
707
742
  ) -> DataComponent:
708
743
  result = cls.component_validation(operand, period_from, period_to, conf)
709
744
  if operand.data is not None:
@@ -717,7 +752,7 @@ class Time_Aggregation(Time):
717
752
 
718
753
  @classmethod
719
754
  def scalar_evaluation(
720
- cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
755
+ cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
721
756
  ) -> Scalar:
722
757
  result = cls.scalar_validation(operand, period_from, period_to, conf)
723
758
  result.value = cls._execute_time_aggregation(
@@ -727,11 +762,11 @@ class Time_Aggregation(Time):
727
762
 
728
763
  @classmethod
729
764
  def validate(
730
- cls,
731
- operand: Union[Dataset, DataComponent, Scalar],
732
- period_from: Optional[str],
733
- period_to: str,
734
- conf: str,
765
+ cls,
766
+ operand: Union[Dataset, DataComponent, Scalar],
767
+ period_from: Optional[str],
768
+ period_to: str,
769
+ conf: str,
735
770
  ) -> Union[Dataset, DataComponent, Scalar]:
736
771
  cls._check_params(period_from, period_to)
737
772
  if isinstance(operand, Dataset):
@@ -743,11 +778,11 @@ class Time_Aggregation(Time):
743
778
 
744
779
  @classmethod
745
780
  def evaluate(
746
- cls,
747
- operand: Union[Dataset, DataComponent, Scalar],
748
- period_from: Optional[str],
749
- period_to: str,
750
- conf: str,
781
+ cls,
782
+ operand: Union[Dataset, DataComponent, Scalar],
783
+ period_from: Optional[str],
784
+ period_to: str,
785
+ conf: str,
751
786
  ) -> Union[Dataset, DataComponent, Scalar]:
752
787
  cls._check_params(period_from, period_to)
753
788
  if isinstance(operand, Dataset):
@@ -784,3 +819,291 @@ class Current_Date(Time):
784
819
  result = cls.validate()
785
820
  result.value = date.today().isoformat()
786
821
  return result
822
+
823
+
824
class SimpleBinaryTime(Operators.Binary):
    """Binary time operator over scalars/components.

    Dataset operands are rejected outright, and Date may not be mixed with
    TimePeriod in either order.
    """

    @classmethod
    def validate_type_compatibility(cls, left: Any, right: Any) -> bool:
        """Return False only for a Date/TimePeriod mix (either order).

        The original spelled this as two early-return checks plus a final
        expression that repeated one of them; a single boolean is equivalent.
        """
        return not (
            (left == Date and right == TimePeriod)
            or (left == TimePeriod and right == Date)
        )

    @classmethod
    def validate(
        cls,
        left_operand: Union[Dataset, DataComponent, Scalar],
        right_operand: Union[Dataset, DataComponent, Scalar],
    ) -> Union[Dataset, DataComponent, Scalar]:
        """Reject Dataset operands and Date/TimePeriod mixes, then defer to Binary.

        Raises:
            SemanticError: "1-1-19-8" for Dataset operands, "1-1-1-2" for an
                incompatible type combination.
        """
        if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset):
            raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
        if not cls.validate_type_compatibility(left_operand.data_type, right_operand.data_type):
            raise SemanticError(
                "1-1-1-2",
                type_1=left_operand.data_type,
                type_2=right_operand.data_type,
                type_check=cls.type_to_check,
            )
        return super().validate(left_operand, right_operand)

    @classmethod
    def evaluate(
        cls,
        left_operand: Union[Dataset, DataComponent, Scalar],
        right_operand: Union[Dataset, DataComponent, Scalar],
    ) -> Union[Dataset, DataComponent, Scalar]:
        """Validate the pair, then evaluate via the generic Binary implementation."""
        if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset):
            raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
        cls.validate(left_operand, right_operand)
        return super().evaluate(left_operand, right_operand)
859
+
860
+
861
class Date_Diff(SimpleBinaryTime):
    op = DATEDIFF
    type_to_check = TimeInterval
    return_type = Integer

    @classmethod
    def py_op(cls, x: Any, y: Any) -> int:
        """Absolute number of days between two date / time-period literals."""
        # Time-interval literals (containing '/') are not supported here.
        if (x.count("/") >= 1) or (y.count("/") >= 1):
            raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")

        def as_date(value: Any) -> Any:
            # Two dashes -> plain ISO date; otherwise a time-period literal,
            # for which the period's end date is used.
            if value.count("-") == 2:
                return datetime.strptime(value, '%Y-%m-%d').date()
            return TimePeriodHandler(value).end_date(as_date=True)  # type: ignore[assignment]

        start, end = as_date(x), as_date(y)
        return abs((end - start).days)
882
+
883
+
884
class Date_Add(Parametrized):
    """``dateadd`` operator: shift Date/TimePeriod values by N periods, yielding Dates."""

    op = DATE_ADD

    @classmethod
    def validate(cls,
                 operand: Union[Scalar, DataComponent, Dataset],
                 param_list: List[Scalar]
                 ) -> Union[Scalar, DataComponent, Dataset]:
        """Check parameter kinds/types and that the operand carries time values.

        param_list[0] is ``shiftNumber`` (Integer scalar), param_list[1] is
        ``periodInd`` (String scalar).

        Raises:
            SemanticError: 2-1-19-12 (param not a scalar), 2-1-19-13 (wrong scalar
                type), 2-1-19-14 (dataset with no time-typed component).
        """
        expected_types = [Integer, String]
        param_names = ["shiftNumber", "periodInd"]
        for i, param in enumerate(param_list):
            # error 12: not a Scalar at all; error 13: Scalar of the wrong data type.
            error = 12 if not isinstance(param, Scalar) else 13 if (  # type: ignore[redundant-expr]
                param.data_type != expected_types[i]) else None
            if error is not None:
                raise SemanticError(
                    f"2-1-19-{error}",
                    op=cls.op,
                    type=param.__class__.__name__ if error == 12 else param.data_type.__name__,
                    # BUG FIX: report the parameter actually at fault. The original chose
                    # the name from the error code ("shiftNumber" iff error == 12), so a
                    # non-scalar second parameter was reported as "shiftNumber" and a
                    # wrongly-typed first parameter as "periodInd".
                    name=param_names[i],
                    expected="Scalar" if error == 12 else expected_types[i].__name__,
                )

        if (isinstance(operand, (Scalar, DataComponent)) and
                operand.data_type not in [Date, TimePeriod]):
            # Raises if the operand's type cannot be implicitly promoted to Date.
            unary_implicit_promotion(operand.data_type, Date)

        if isinstance(operand, Scalar):
            return Scalar(name=operand.name, data_type=operand.data_type, value=None)
        if isinstance(operand, DataComponent):
            return DataComponent(name=operand.name, data_type=operand.data_type, data=None)

        # Dataset operand: at least one component must be time-typed.
        if all(comp.data_type not in [Date, TimePeriod] for comp in operand.components.values()):
            raise SemanticError("2-1-19-14", op=cls.op, name=operand.name)
        return Dataset(name='result', components=operand.components.copy(), data=None)

    @classmethod
    def evaluate(cls,
                 operand: Union[Scalar, DataComponent, Dataset],
                 param_list: List[Scalar]
                 ) -> Union[Scalar, DataComponent, Dataset]:
        """Apply the shift to a scalar value, component data, or all time measures."""
        result = cls.validate(operand, param_list)
        shift, period = param_list[0].value, param_list[1].value
        is_tp = isinstance(operand, (Scalar, DataComponent)) and operand.data_type == TimePeriod

        if isinstance(result, Scalar) and isinstance(operand, Scalar) and operand.value is not None:
            result.value = cls.py_op(operand.value, shift, period, is_tp)
        elif (isinstance(result, DataComponent) and isinstance(operand, DataComponent) and
                operand.data is not None):
            result.data = operand.data.map(lambda x: cls.py_op(x, shift, period, is_tp),
                                           na_action="ignore")
        elif (isinstance(result, Dataset) and isinstance(operand, Dataset) and
                operand.data is not None):
            result.data = operand.data.copy()
            for measure in operand.get_measures():
                if measure.data_type in [Date, TimePeriod]:
                    result.data[measure.name] = result.data[measure.name].map(
                        lambda x: cls.py_op(x, shift, period, measure.data_type == TimePeriod),
                        na_action="ignore")
                    measure.data_type = Date

        # The operator always yields dates.
        if isinstance(result, (Scalar, DataComponent)):
            result.data_type = Date
        return result

    @classmethod
    def py_op(cls,
              date_str: str,
              shift: int, period: str,
              is_tp: bool = False
              ) -> str:
        """Shift one date (or period start) by ``shift`` units of ``period``, ISO-formatted.

        ``period`` is one of D/W (day-based) or M/Q/S/A (month-based).
        """
        if is_tp:
            tp_value = TimePeriodHandler(date_str)
            # renamed from `date` to avoid shadowing the imported datetime.date
            base = period_to_date(tp_value.year, tp_value.period_indicator, tp_value.period_number)
        else:
            base = datetime.strptime(date_str, "%Y-%m-%d")

        if period in ['D', 'W']:
            days_shift = shift * (7 if period == 'W' else 1)
            return (base + timedelta(days=days_shift)).strftime("%Y-%m-%d")

        # Month-based periods: months per unit times the shift, carried into the year.
        month_shift = {'M': 1, 'Q': 3, 'S': 6, 'A': 12}[period] * shift
        new_year = base.year + (base.month - 1 + month_shift) // 12
        new_month = (base.month - 1 + month_shift) % 12 + 1
        # Last day of the target month: first day of the following month minus one day
        # (for December this lands on Dec 31 of the prior year — the day number, 31,
        # is still correct).
        last_day = (datetime(new_year, new_month % 12 + 1, 1) - timedelta(days=1)).day
        # Clamp the day so e.g. Jan 31 + 1M becomes Feb 28/29.
        return base.replace(year=new_year, month=new_month,
                            day=min(base.day, last_day)).strftime("%Y-%m-%d")
970
+
971
+
972
class SimpleUnaryTime(Operators.Unary):
    """Unary time operator restricted to Date/TimePeriod scalars and components."""

    @classmethod
    def validate(
        cls, operand: Union[Dataset, DataComponent, Scalar]
    ) -> Union[Dataset, DataComponent, Scalar]:
        """Reject datasets and non-Date/TimePeriod operands, then defer to Unary.

        Raises:
            SemanticError: "1-1-19-8" for Dataset operands, "1-1-19-10" otherwise.
        """
        if isinstance(operand, Dataset):
            raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")

        # Limit the operand to Date and TimePeriod (cannot be implemented with
        # type_to_check). The original also tested `== TimeInterval`, which is
        # already covered by `not in (Date, TimePeriod)` — dead condition removed.
        if operand.data_type not in (Date, TimePeriod):
            raise SemanticError("1-1-19-10", op=cls.op)

        return super().validate(operand)

    @classmethod
    def evaluate(
        cls, operand: Union[Dataset, DataComponent, Scalar]
    ) -> Union[Dataset, DataComponent, Scalar]:
        """Validate then evaluate via the generic Unary implementation."""
        cls.validate(operand)
        return super().evaluate(operand)
993
+
994
+
995
class Year(SimpleUnaryTime):
    """Extract the year of a date or time-period literal (first four characters)."""

    op = YEAR
    # Moved next to `op` for consistency with the sibling operators
    # (Month, Day_of_Month, ...), which declare return_type before any method.
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        # Both ISO dates and time-period literals start with a 4-digit year.
        return int(value[:4])
1003
+
1004
+
1005
class Month(SimpleUnaryTime):
    op = MONTH
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        """Month number of an ISO date, or of the start of a time period."""
        is_iso_date = value.count("-") == 2
        if not is_iso_date:
            start = TimePeriodHandler(value).start_date(as_date=True)
            return start.month  # type: ignore[union-attr]
        return date.fromisoformat(value).month
1016
+
1017
+
1018
class Day_of_Month(SimpleUnaryTime):
    op = DAYOFMONTH
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        """Day of month for an ISO date, or for the end of a time period."""
        if value.count("-") != 2:
            end = TimePeriodHandler(value).end_date(as_date=True)
            return end.day  # type: ignore[union-attr]
        return date.fromisoformat(value).day
1029
+
1030
+
1031
class Day_of_Year(SimpleUnaryTime):
    op = DAYOFYEAR
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        """Ordinal day within the year (ISO date, or end of a time period)."""
        if value.count("-") == 2:
            return datetime.strptime(value, "%Y-%m-%d").timetuple().tm_yday

        end = TimePeriodHandler(value).end_date(as_date=True)
        as_datetime = datetime(
            year=end.year, month=end.month, day=end.day  # type: ignore[union-attr]
        )
        return as_datetime.timetuple().tm_yday
1046
+
1047
+
1048
class Day_to_Year(Operators.Unary):
    """Convert a day count into a ``P<y>Y<d>D`` duration using 365-day years."""

    op = DAYTOYEAR
    return_type = String

    @classmethod
    def py_op(cls, value: int) -> str:
        # Negative day counts have no duration representation.
        if value < 0:
            raise SemanticError("2-1-19-17", op=cls.op)
        # divmod replaces the original redundant `if value >= 365` branch; int(value)
        # keeps the original truncating behavior for float inputs (e.g. from pandas).
        years, days_remaining = divmod(int(value), 365)
        return f"P{years}Y{days_remaining}D"
1062
+
1063
+
1064
class Day_to_Month(Operators.Unary):
    """Convert a day count into a ``P<m>M<d>D`` duration using 30-day months."""

    op = DAYTOMONTH
    return_type = String

    @classmethod
    def py_op(cls, value: int) -> str:
        # Negative day counts have no duration representation.
        if value < 0:
            raise SemanticError("2-1-19-17", op=cls.op)
        # divmod replaces the original redundant `if value >= 30` branch; int(value)
        # keeps the original truncating behavior for float inputs (e.g. from pandas).
        months, days_remaining = divmod(int(value), 30)
        return f"P{months}M{days_remaining}D"
1078
+
1079
+
1080
class Year_to_Day(Operators.Unary):
    op = YEARTODAY
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        """Convert a ``P<y>Y<d>D`` duration literal into total days (365-day years)."""
        if "/" in value:
            raise SemanticError("2-1-19-11", op=cls.op)
        if "Y" not in value:
            raise SemanticError("2-1-19-15", op=cls.op)
        head, _, tail = value.partition("Y")
        years = int(head[1:])   # skip the leading 'P'
        days = int(tail[:-1])   # drop the trailing 'D'
        return years * 365 + days
1094
+
1095
+
1096
class Month_to_Day(Operators.Unary):
    op = MONTHTODAY
    return_type = Integer

    @classmethod
    def py_op(cls, value: str) -> int:
        """Convert a ``P<m>M<d>D`` duration literal into total days (30-day months)."""
        if "/" in value:
            raise SemanticError("2-1-19-11", op=cls.op)
        if "M" not in value:
            raise SemanticError("2-1-19-16", op=cls.op)
        head, _, tail = value.partition("M")
        months = int(head[1:])  # skip the leading 'P'
        days = int(tail[:-1])   # drop the trailing 'D'
        return months * 30 + days
@@ -2,12 +2,12 @@ from copy import copy
2
2
  from typing import Any, Dict, Optional
3
3
 
4
4
  import pandas as pd
5
- from vtlengine.DataTypes import Boolean, Integer, Number, String, check_unary_implicit_promotion
6
- from vtlengine.Operators import Operator
7
5
 
8
6
  from vtlengine.AST.Grammar.tokens import CHECK, CHECK_HIERARCHY
7
+ from vtlengine.DataTypes import Boolean, Integer, Number, String, check_unary_implicit_promotion
9
8
  from vtlengine.Exceptions import SemanticError
10
9
  from vtlengine.Model import Component, Dataset, Role
10
+ from vtlengine.Operators import Operator
11
11
 
12
12
 
13
13
  # noinspection PyTypeChecker
@@ -191,16 +191,13 @@ class Check_Hierarchy(Validation):
191
191
 
192
192
  @classmethod
193
193
  def _generate_result_data(cls, rule_info: Dict[str, Any]) -> pd.DataFrame:
194
- df = None
194
+ df = pd.DataFrame()
195
195
  for rule_name, rule_data in rule_info.items():
196
196
  rule_df = rule_data["output"]
197
197
  rule_df["ruleid"] = rule_name
198
198
  rule_df["errorcode"] = rule_data["errorcode"]
199
199
  rule_df["errorlevel"] = rule_data["errorlevel"]
200
- if df is None:
201
- df = rule_df
202
- else:
203
- df = pd.concat([df, rule_df], ignore_index=True)
200
+ df = pd.concat([df, rule_df], ignore_index=True)
204
201
  if df is None:
205
202
  df = pd.DataFrame()
206
203
  return df