vtlengine 1.0.3rc2__py3-none-any.whl → 1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of vtlengine might be problematic; see the registry's advisory page for details.

Files changed (47)
  1. vtlengine/API/_InternalApi.py +55 -20
  2. vtlengine/API/__init__.py +11 -2
  3. vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  4. vtlengine/AST/ASTConstructor.py +5 -4
  5. vtlengine/AST/ASTConstructorModules/Expr.py +47 -48
  6. vtlengine/AST/ASTConstructorModules/ExprComponents.py +45 -23
  7. vtlengine/AST/ASTConstructorModules/Terminals.py +21 -11
  8. vtlengine/AST/ASTEncoders.py +1 -1
  9. vtlengine/AST/DAG/__init__.py +0 -3
  10. vtlengine/AST/Grammar/lexer.py +0 -1
  11. vtlengine/AST/Grammar/parser.py +185 -440
  12. vtlengine/AST/VtlVisitor.py +0 -1
  13. vtlengine/DataTypes/TimeHandling.py +50 -15
  14. vtlengine/DataTypes/__init__.py +79 -7
  15. vtlengine/Exceptions/__init__.py +3 -5
  16. vtlengine/Exceptions/messages.py +65 -105
  17. vtlengine/Interpreter/__init__.py +83 -38
  18. vtlengine/Model/__init__.py +7 -9
  19. vtlengine/Operators/Aggregation.py +13 -7
  20. vtlengine/Operators/Analytic.py +48 -9
  21. vtlengine/Operators/Assignment.py +0 -1
  22. vtlengine/Operators/CastOperator.py +44 -44
  23. vtlengine/Operators/Clause.py +16 -10
  24. vtlengine/Operators/Comparison.py +20 -12
  25. vtlengine/Operators/Conditional.py +30 -13
  26. vtlengine/Operators/General.py +9 -4
  27. vtlengine/Operators/HROperators.py +4 -14
  28. vtlengine/Operators/Join.py +15 -14
  29. vtlengine/Operators/Numeric.py +32 -26
  30. vtlengine/Operators/RoleSetter.py +6 -2
  31. vtlengine/Operators/Set.py +12 -8
  32. vtlengine/Operators/String.py +9 -9
  33. vtlengine/Operators/Time.py +136 -116
  34. vtlengine/Operators/Validation.py +10 -4
  35. vtlengine/Operators/__init__.py +56 -69
  36. vtlengine/Utils/__init__.py +6 -1
  37. vtlengine/files/output/__init__.py +0 -1
  38. vtlengine/files/output/_time_period_representation.py +2 -1
  39. vtlengine/files/parser/__init__.py +44 -10
  40. vtlengine/files/parser/_rfc_dialect.py +1 -1
  41. vtlengine/files/parser/_time_checking.py +4 -4
  42. {vtlengine-1.0.3rc2.dist-info → vtlengine-1.0.4.dist-info}/METADATA +9 -7
  43. vtlengine-1.0.4.dist-info/RECORD +58 -0
  44. {vtlengine-1.0.3rc2.dist-info → vtlengine-1.0.4.dist-info}/WHEEL +1 -1
  45. vtlengine/DataTypes/NumericTypesHandling.py +0 -38
  46. vtlengine-1.0.3rc2.dist-info/RECORD +0 -58
  47. {vtlengine-1.0.3rc2.dist-info → vtlengine-1.0.4.dist-info}/LICENSE.md +0 -0
@@ -33,7 +33,7 @@ from vtlengine.DataTypes import (
33
33
  unary_implicit_promotion,
34
34
  )
35
35
  from vtlengine.DataTypes.TimeHandling import (
36
- DURATION_MAPPING,
36
+ PERIOD_IND_MAPPING,
37
37
  TimePeriodHandler,
38
38
  date_to_period,
39
39
  period_to_date,
@@ -98,7 +98,8 @@ class Time(Operators.Operator):
98
98
  months_deltas = differences.apply(lambda x: x.days // 30)
99
99
  days_deltas = differences.apply(lambda x: x.days)
100
100
  min_months = min(
101
- (diff for diff in months_deltas if diff > 0 and diff % 12 != 0), default=None
101
+ (diff for diff in months_deltas if diff > 0 and diff % 12 != 0),
102
+ default=None,
102
103
  )
103
104
  min_days = min(
104
105
  (diff for diff in days_deltas if diff > 0 and diff % 365 != 0 and diff % 366 != 0),
@@ -118,7 +119,6 @@ class Time(Operators.Operator):
118
119
 
119
120
 
120
121
  class Unary(Time):
121
-
122
122
  @classmethod
123
123
  def validate(cls, operand: Any) -> Any:
124
124
  if not isinstance(operand, Dataset):
@@ -190,7 +190,10 @@ class Period_indicator(Unary):
190
190
  if comp.role == Role.IDENTIFIER
191
191
  }
192
192
  result_components["duration_var"] = Component(
193
- name="duration_var", data_type=Duration, role=Role.MEASURE, nullable=True
193
+ name="duration_var",
194
+ data_type=Duration,
195
+ role=Role.MEASURE,
196
+ nullable=True,
194
197
  )
195
198
  return Dataset(name="result", components=result_components, data=None)
196
199
  # DataComponent and Scalar validation
@@ -202,7 +205,7 @@ class Period_indicator(Unary):
202
205
 
203
206
  @classmethod
204
207
  def evaluate(
205
- cls, operand: Union[Dataset, DataComponent, Scalar, str]
208
+ cls, operand: Union[Dataset, DataComponent, Scalar, str]
206
209
  ) -> Union[Dataset, DataComponent, Scalar, str]:
207
210
  result = cls.validate(operand)
208
211
  if isinstance(operand, str):
@@ -226,7 +229,6 @@ class Period_indicator(Unary):
226
229
 
227
230
 
228
231
  class Parametrized(Time):
229
-
230
232
  @classmethod
231
233
  def validate(cls, operand: Any, param: Any) -> Any:
232
234
  pass
@@ -237,14 +239,12 @@ class Parametrized(Time):
237
239
 
238
240
 
239
241
  class Flow_to_stock(Unary):
240
-
241
242
  @classmethod
242
243
  def py_op(cls, x: Any) -> Any:
243
244
  return x.cumsum().fillna(x)
244
245
 
245
246
 
246
247
  class Stock_to_flow(Unary):
247
-
248
248
  @classmethod
249
249
  def py_op(cls, x: Any) -> Any:
250
250
  return x.diff().fillna(x)
@@ -299,7 +299,6 @@ class Fill_time_series(Binary):
299
299
 
300
300
  @classmethod
301
301
  def max_min_from_period(cls, data: pd.DataFrame, mode: str = "all") -> Dict[str, Any]:
302
-
303
302
  result_dict: Dict[Any, Any] = {}
304
303
  data = data.assign(
305
304
  Periods_col=data[cls.time_id].apply(cls._get_period),
@@ -369,7 +368,10 @@ class Fill_time_series(Binary):
369
368
  else:
370
369
  if period in period_limits["min"] and period in period_limits["max"]:
371
370
  vals = list(
372
- range(period_limits["min"][period], period_limits["max"][period] + 1)
371
+ range(
372
+ period_limits["min"][period],
373
+ period_limits["max"][period] + 1,
374
+ )
373
375
  )
374
376
  filled_data.extend(
375
377
  cls.fill_periods_rows(group_df, period, years, vals=vals)
@@ -385,7 +387,11 @@ class Fill_time_series(Binary):
385
387
 
386
388
  @classmethod
387
389
  def fill_periods_rows(
388
- cls, group_df: Any, period: str, years: List[int], vals: Optional[List[int]] = None
390
+ cls,
391
+ group_df: Any,
392
+ period: str,
393
+ years: List[int],
394
+ vals: Optional[List[int]] = None,
389
395
  ) -> List[Any]:
390
396
  rows = []
391
397
  for year in years:
@@ -398,7 +404,7 @@ class Fill_time_series(Binary):
398
404
 
399
405
  @classmethod
400
406
  def create_period_row(
401
- cls, group_df: Any, period: str, year: int, val: Optional[int] = None
407
+ cls, group_df: Any, period: str, year: int, val: Optional[int] = None
402
408
  ) -> Any:
403
409
  row = group_df.iloc[0].copy()
404
410
  row[cls.time_id] = f"{year}" if period == "A" else f"{year}-{period}{val:d}"
@@ -436,9 +442,7 @@ class Fill_time_series(Binary):
436
442
  date_format = None
437
443
  filled_data = []
438
444
 
439
- def create_filled_dates(
440
- group: Any, min_max: Dict[str, Any]
441
- ) -> (pd.DataFrame, str): # type: ignore[syntax]
445
+ def create_filled_dates(group: Any, min_max: Dict[str, Any]) -> (pd.DataFrame, str): # type: ignore[syntax]
442
446
  date_range = pd.date_range(start=min_max["min"], end=min_max["max"], freq=min_frequency)
443
447
  date_df = pd.DataFrame(date_range, columns=[cls.time_id])
444
448
  date_df[cls.other_ids] = group.iloc[0][cls.other_ids]
@@ -480,7 +484,7 @@ class Fill_time_series(Binary):
480
484
 
481
485
  @classmethod
482
486
  def fill_time_intervals(
483
- cls, data: pd.DataFrame, fill_type: str, frequency: str
487
+ cls, data: pd.DataFrame, fill_type: str, frequency: str
484
488
  ) -> pd.DataFrame:
485
489
  result_data = cls.time_filler(data, fill_type, frequency)
486
490
  not_na = result_data[cls.measures].notna().any(axis=1)
@@ -588,7 +592,7 @@ class Time_Shift(Binary):
588
592
 
589
593
  @classmethod
590
594
  def shift_period(
591
- cls, period_str: str, shift_value: int, frequency: Optional[int] = None
595
+ cls, period_str: str, shift_value: int, frequency: Optional[int] = None
592
596
  ) -> str:
593
597
  period_type = cls._get_period(period_str)
594
598
 
@@ -628,7 +632,7 @@ class Time_Aggregation(Time):
628
632
 
629
633
  @classmethod
630
634
  def _check_duration(cls, value: str) -> None:
631
- if value not in DURATION_MAPPING:
635
+ if value not in PERIOD_IND_MAPPING:
632
636
  raise SemanticError("1-1-19-3", op=cls.op, param="duration")
633
637
 
634
638
  @classmethod
@@ -636,13 +640,13 @@ class Time_Aggregation(Time):
636
640
  cls._check_duration(period_to)
637
641
  if period_from is not None:
638
642
  cls._check_duration(period_from)
639
- if DURATION_MAPPING[period_to] <= DURATION_MAPPING[period_from]:
643
+ if PERIOD_IND_MAPPING[period_to] <= PERIOD_IND_MAPPING[period_from]:
640
644
  # OPERATORS_TIMEOPERATORS.19
641
645
  raise SemanticError("1-1-19-4", op=cls.op, value_1=period_from, value_2=period_to)
642
646
 
643
647
  @classmethod
644
648
  def dataset_validation(
645
- cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
649
+ cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
646
650
  ) -> Dataset:
647
651
  # TODO: Review with VTL TF as this makes no sense
648
652
 
@@ -661,7 +665,10 @@ class Time_Aggregation(Time):
661
665
  count_time_types += 1
662
666
  if count_time_types != 1:
663
667
  raise SemanticError(
664
- "1-1-19-9", op=cls.op, comp_type="dataset", param="single time identifier"
668
+ "1-1-19-9",
669
+ op=cls.op,
670
+ comp_type="dataset",
671
+ param="single time identifier",
665
672
  )
666
673
 
667
674
  if count_time_types != 1:
@@ -679,7 +686,11 @@ class Time_Aggregation(Time):
679
686
 
680
687
  @classmethod
681
688
  def component_validation(
682
- cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
689
+ cls,
690
+ operand: DataComponent,
691
+ period_from: Optional[str],
692
+ period_to: str,
693
+ conf: str,
683
694
  ) -> DataComponent:
684
695
  if operand.data_type not in cls.TIME_DATA_TYPES:
685
696
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time component")
@@ -692,7 +703,7 @@ class Time_Aggregation(Time):
692
703
 
693
704
  @classmethod
694
705
  def scalar_validation(
695
- cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
706
+ cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
696
707
  ) -> Scalar:
697
708
  if operand.data_type not in cls.TIME_DATA_TYPES:
698
709
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time scalar")
@@ -701,12 +712,12 @@ class Time_Aggregation(Time):
701
712
 
702
713
  @classmethod
703
714
  def _execute_time_aggregation(
704
- cls,
705
- value: str,
706
- data_type: Type[ScalarType],
707
- period_from: Optional[str],
708
- period_to: str,
709
- conf: str,
715
+ cls,
716
+ value: str,
717
+ data_type: Type[ScalarType],
718
+ period_from: Optional[str],
719
+ period_to: str,
720
+ conf: str,
710
721
  ) -> str:
711
722
  if data_type == TimePeriod: # Time period
712
723
  return _time_period_access(value, period_to)
@@ -722,7 +733,7 @@ class Time_Aggregation(Time):
722
733
 
723
734
  @classmethod
724
735
  def dataset_evaluation(
725
- cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
736
+ cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str
726
737
  ) -> Dataset:
727
738
  result = cls.dataset_validation(operand, period_from, period_to, conf)
728
739
  result.data = operand.data.copy() if operand.data is not None else pd.DataFrame()
@@ -738,7 +749,11 @@ class Time_Aggregation(Time):
738
749
 
739
750
  @classmethod
740
751
  def component_evaluation(
741
- cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str
752
+ cls,
753
+ operand: DataComponent,
754
+ period_from: Optional[str],
755
+ period_to: str,
756
+ conf: str,
742
757
  ) -> DataComponent:
743
758
  result = cls.component_validation(operand, period_from, period_to, conf)
744
759
  if operand.data is not None:
@@ -752,7 +767,7 @@ class Time_Aggregation(Time):
752
767
 
753
768
  @classmethod
754
769
  def scalar_evaluation(
755
- cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
770
+ cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str
756
771
  ) -> Scalar:
757
772
  result = cls.scalar_validation(operand, period_from, period_to, conf)
758
773
  result.value = cls._execute_time_aggregation(
@@ -762,11 +777,11 @@ class Time_Aggregation(Time):
762
777
 
763
778
  @classmethod
764
779
  def validate(
765
- cls,
766
- operand: Union[Dataset, DataComponent, Scalar],
767
- period_from: Optional[str],
768
- period_to: str,
769
- conf: str,
780
+ cls,
781
+ operand: Union[Dataset, DataComponent, Scalar],
782
+ period_from: Optional[str],
783
+ period_to: str,
784
+ conf: str,
770
785
  ) -> Union[Dataset, DataComponent, Scalar]:
771
786
  cls._check_params(period_from, period_to)
772
787
  if isinstance(operand, Dataset):
@@ -778,11 +793,11 @@ class Time_Aggregation(Time):
778
793
 
779
794
  @classmethod
780
795
  def evaluate(
781
- cls,
782
- operand: Union[Dataset, DataComponent, Scalar],
783
- period_from: Optional[str],
784
- period_to: str,
785
- conf: str,
796
+ cls,
797
+ operand: Union[Dataset, DataComponent, Scalar],
798
+ period_from: Optional[str],
799
+ period_to: str,
800
+ conf: str,
786
801
  ) -> Union[Dataset, DataComponent, Scalar]:
787
802
  cls._check_params(period_from, period_to)
788
803
  if isinstance(operand, Dataset):
@@ -809,7 +824,6 @@ def _date_access(v: str, to_param: str, start: bool) -> Any:
809
824
 
810
825
 
811
826
  class Current_Date(Time):
812
-
813
827
  @classmethod
814
828
  def validate(cls) -> Scalar:
815
829
  return Scalar(name="current_date", data_type=Date, value=None)
@@ -830,26 +844,30 @@ class SimpleBinaryTime(Operators.Binary):
830
844
  if left == TimePeriod and right == Date:
831
845
  return False
832
846
 
833
- return not (left == TimePeriod and right == Date)
847
+ return not (left == TimePeriod and right == Date)
834
848
 
835
849
  @classmethod
836
850
  def validate(
837
- cls, left_operand: Union[Dataset, DataComponent, Scalar],
838
- right_operand: Union[Dataset, DataComponent, Scalar]
851
+ cls,
852
+ left_operand: Union[Dataset, DataComponent, Scalar],
853
+ right_operand: Union[Dataset, DataComponent, Scalar],
839
854
  ) -> Union[Dataset, DataComponent, Scalar]:
840
855
  if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset):
841
856
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
842
857
  if not cls.validate_type_compatibility(left_operand.data_type, right_operand.data_type):
843
858
  raise SemanticError(
844
- "1-1-1-2", type_1=left_operand.data_type, type_2=right_operand.data_type,
845
- type_check=cls.type_to_check
859
+ "1-1-1-2",
860
+ type_1=left_operand.data_type,
861
+ type_2=right_operand.data_type,
862
+ type_check=cls.type_to_check,
846
863
  )
847
864
  return super().validate(left_operand, right_operand)
848
865
 
849
866
  @classmethod
850
867
  def evaluate(
851
- cls, left_operand: Union[Dataset, DataComponent, Scalar],
852
- right_operand: Union[Dataset, DataComponent, Scalar]
868
+ cls,
869
+ left_operand: Union[Dataset, DataComponent, Scalar],
870
+ right_operand: Union[Dataset, DataComponent, Scalar],
853
871
  ) -> Union[Dataset, DataComponent, Scalar]:
854
872
  if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset):
855
873
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
@@ -869,12 +887,12 @@ class Date_Diff(SimpleBinaryTime):
869
887
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
870
888
 
871
889
  if x.count("-") == 2:
872
- fecha1 = datetime.strptime(x, '%Y-%m-%d').date()
890
+ fecha1 = datetime.strptime(x, "%Y-%m-%d").date()
873
891
  else:
874
892
  fecha1 = TimePeriodHandler(x).end_date(as_date=True) # type: ignore[assignment]
875
893
 
876
894
  if y.count("-") == 2:
877
- fecha2 = datetime.strptime(y, '%Y-%m-%d').date()
895
+ fecha2 = datetime.strptime(y, "%Y-%m-%d").date()
878
896
  else:
879
897
  fecha2 = TimePeriodHandler(y).end_date(as_date=True) # type: ignore[assignment]
880
898
 
@@ -885,26 +903,31 @@ class Date_Add(Parametrized):
885
903
  op = DATE_ADD
886
904
 
887
905
  @classmethod
888
- def validate(cls,
889
- operand: Union[Scalar, DataComponent, Dataset],
890
- param_list: List[Scalar]
891
- ) -> Union[Scalar, DataComponent, Dataset]:
892
-
906
+ def validate(
907
+ cls, operand: Union[Scalar, DataComponent, Dataset], param_list: List[Scalar]
908
+ ) -> Union[Scalar, DataComponent, Dataset]:
893
909
  expected_types = [Integer, String]
894
910
  for i, param in enumerate(param_list):
895
- error = 12 if not isinstance(param, Scalar) else 13 if ( # type: ignore[redundant-expr]
896
- param.data_type != expected_types[i]) else None
911
+ error = (
912
+ 12
913
+ if not isinstance(param, Scalar) # type: ignore[redundant-expr]
914
+ else 13
915
+ if (param.data_type != expected_types[i])
916
+ else None
917
+ )
897
918
  if error is not None:
898
- raise SemanticError(f"2-1-19-{error}",
899
- op=cls.op,
900
- type=param.__class__.__name__ if error == 12 else
901
- param.data_type.__name__,
902
- name="shiftNumber" if error == 12 else "periodInd",
903
- expected="Scalar" if error == 12 else expected_types[i].__name__
904
- )
905
-
906
- if (isinstance(operand, (Scalar, DataComponent)) and
907
- operand.data_type not in [Date, TimePeriod]):
919
+ raise SemanticError(
920
+ f"2-1-19-{error}",
921
+ op=cls.op,
922
+ type=(param.__class__.__name__ if error == 12 else param.data_type.__name__),
923
+ name="shiftNumber" if error == 12 else "periodInd",
924
+ expected="Scalar" if error == 12 else expected_types[i].__name__,
925
+ )
926
+
927
+ if isinstance(operand, (Scalar, DataComponent)) and operand.data_type not in [
928
+ Date,
929
+ TimePeriod,
930
+ ]:
908
931
  unary_implicit_promotion(operand.data_type, Date)
909
932
 
910
933
  if isinstance(operand, Scalar):
@@ -914,31 +937,38 @@ class Date_Add(Parametrized):
914
937
 
915
938
  if all(comp.data_type not in [Date, TimePeriod] for comp in operand.components.values()):
916
939
  raise SemanticError("2-1-19-14", op=cls.op, name=operand.name)
917
- return Dataset(name='result', components=operand.components.copy(), data=None)
940
+ return Dataset(name="result", components=operand.components.copy(), data=None)
918
941
 
919
942
  @classmethod
920
- def evaluate(cls,
921
- operand: Union[Scalar, DataComponent, Dataset],
922
- param_list: List[Scalar]
923
- ) -> Union[Scalar, DataComponent, Dataset]:
943
+ def evaluate(
944
+ cls, operand: Union[Scalar, DataComponent, Dataset], param_list: List[Scalar]
945
+ ) -> Union[Scalar, DataComponent, Dataset]:
924
946
  result = cls.validate(operand, param_list)
925
947
  shift, period = param_list[0].value, param_list[1].value
926
948
  is_tp = isinstance(operand, (Scalar, DataComponent)) and operand.data_type == TimePeriod
927
949
 
928
950
  if isinstance(result, Scalar) and isinstance(operand, Scalar) and operand.value is not None:
929
951
  result.value = cls.py_op(operand.value, shift, period, is_tp)
930
- elif (isinstance(result, DataComponent) and isinstance(operand, DataComponent) and
931
- operand.data is not None):
932
- result.data = operand.data.map(lambda x: cls.py_op(x, shift, period, is_tp),
933
- na_action="ignore")
934
- elif (isinstance(result, Dataset) and isinstance(operand, Dataset) and
935
- operand.data is not None):
952
+ elif (
953
+ isinstance(result, DataComponent)
954
+ and isinstance(operand, DataComponent)
955
+ and operand.data is not None
956
+ ):
957
+ result.data = operand.data.map(
958
+ lambda x: cls.py_op(x, shift, period, is_tp), na_action="ignore"
959
+ )
960
+ elif (
961
+ isinstance(result, Dataset)
962
+ and isinstance(operand, Dataset)
963
+ and operand.data is not None
964
+ ):
936
965
  result.data = operand.data.copy()
937
966
  for measure in operand.get_measures():
938
967
  if measure.data_type in [Date, TimePeriod]:
939
968
  result.data[measure.name] = result.data[measure.name].map(
940
969
  lambda x: cls.py_op(x, shift, period, measure.data_type == TimePeriod),
941
- na_action="ignore")
970
+ na_action="ignore",
971
+ )
942
972
  measure.data_type = Date
943
973
 
944
974
  if isinstance(result, (Scalar, DataComponent)):
@@ -946,47 +976,47 @@ class Date_Add(Parametrized):
946
976
  return result
947
977
 
948
978
  @classmethod
949
- def py_op(cls,
950
- date_str: str,
951
- shift: int, period: str,
952
- is_tp: bool = False
953
- ) -> str:
979
+ def py_op(cls, date_str: str, shift: int, period: str, is_tp: bool = False) -> str:
954
980
  if is_tp:
955
981
  tp_value = TimePeriodHandler(date_str)
956
982
  date = period_to_date(tp_value.year, tp_value.period_indicator, tp_value.period_number)
957
983
  else:
958
984
  date = datetime.strptime(date_str, "%Y-%m-%d")
959
985
 
960
- if period in ['D', 'W']:
961
- days_shift = shift * (7 if period == 'W' else 1)
986
+ if period in ["D", "W"]:
987
+ days_shift = shift * (7 if period == "W" else 1)
962
988
  return (date + timedelta(days=days_shift)).strftime("%Y-%m-%d")
963
989
 
964
- month_shift = {'M': 1, 'Q': 3, 'S': 6, 'A': 12}[period] * shift
990
+ month_shift = {"M": 1, "Q": 3, "S": 6, "A": 12}[period] * shift
965
991
  new_year = date.year + (date.month - 1 + month_shift) // 12
966
992
  new_month = (date.month - 1 + month_shift) % 12 + 1
967
993
  last_day = (datetime(new_year, new_month % 12 + 1, 1) - timedelta(days=1)).day
968
- return date.replace(year=new_year, month=new_month,
969
- day=min(date.day, last_day)).strftime("%Y-%m-%d")
994
+ return date.replace(year=new_year, month=new_month, day=min(date.day, last_day)).strftime(
995
+ "%Y-%m-%d"
996
+ )
970
997
 
971
998
 
972
999
  class SimpleUnaryTime(Operators.Unary):
973
-
974
1000
  @classmethod
975
1001
  def validate(
976
- cls, operand: Union[Dataset, DataComponent, Scalar]
1002
+ cls, operand: Union[Dataset, DataComponent, Scalar]
977
1003
  ) -> Union[Dataset, DataComponent, Scalar]:
978
1004
  if isinstance(operand, Dataset):
979
1005
  raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset")
980
1006
 
981
1007
  # Limit the operand to Date and TimePeriod (cannot be implemented with type_to_check)
982
- if operand.data_type == TimeInterval or operand.data_type not in (Date, TimePeriod):
1008
+ if operand.data_type == TimeInterval or operand.data_type not in (
1009
+ Date,
1010
+ TimePeriod,
1011
+ Duration,
1012
+ ):
983
1013
  raise SemanticError("1-1-19-10", op=cls.op)
984
1014
 
985
1015
  return super().validate(operand)
986
1016
 
987
1017
  @classmethod
988
1018
  def evaluate(
989
- cls, operand: Union[Dataset, DataComponent, Scalar]
1019
+ cls, operand: Union[Dataset, DataComponent, Scalar]
990
1020
  ) -> Union[Dataset, DataComponent, Scalar]:
991
1021
  cls.validate(operand)
992
1022
  return super().evaluate(operand)
@@ -1040,19 +1070,21 @@ class Day_of_Year(SimpleUnaryTime):
1040
1070
 
1041
1071
  result = TimePeriodHandler(value).end_date(as_date=True)
1042
1072
  datetime_value = datetime(
1043
- year=result.year, month=result.month, day=result.day # type: ignore[union-attr]
1073
+ year=result.year, # type: ignore[union-attr]
1074
+ month=result.month, # type: ignore[union-attr]
1075
+ day=result.day, # type: ignore[union-attr]
1044
1076
  )
1045
1077
  return datetime_value.timetuple().tm_yday
1046
1078
 
1047
1079
 
1048
1080
  class Day_to_Year(Operators.Unary):
1049
1081
  op = DAYTOYEAR
1050
- return_type = String
1082
+ return_type = Duration
1051
1083
 
1052
1084
  @classmethod
1053
1085
  def py_op(cls, value: int) -> str:
1054
1086
  if value < 0:
1055
- raise SemanticError("2-1-19-17", op=cls.op)
1087
+ raise SemanticError("2-1-19-16", op=cls.op)
1056
1088
  years = 0
1057
1089
  days_remaining = value
1058
1090
  if value >= 365:
@@ -1063,12 +1095,12 @@ class Day_to_Year(Operators.Unary):
1063
1095
 
1064
1096
  class Day_to_Month(Operators.Unary):
1065
1097
  op = DAYTOMONTH
1066
- return_type = String
1098
+ return_type = Duration
1067
1099
 
1068
1100
  @classmethod
1069
1101
  def py_op(cls, value: int) -> str:
1070
1102
  if value < 0:
1071
- raise SemanticError("2-1-19-17", op=cls.op)
1103
+ raise SemanticError("2-1-19-16", op=cls.op)
1072
1104
  months = 0
1073
1105
  days_remaining = value
1074
1106
  if value >= 30:
@@ -1083,14 +1115,8 @@ class Year_to_Day(Operators.Unary):
1083
1115
 
1084
1116
  @classmethod
1085
1117
  def py_op(cls, value: str) -> int:
1086
- if "/" in value:
1087
- raise SemanticError("2-1-19-11", op=cls.op)
1088
- if "Y" not in value:
1089
- raise SemanticError("2-1-19-15", op=cls.op)
1090
- index_y = value.index("Y")
1091
- years = int(value[1:index_y])
1092
- days = int(value[(index_y + 1): -1])
1093
- return years * 365 + days
1118
+ days = Duration.to_days(value)
1119
+ return days
1094
1120
 
1095
1121
 
1096
1122
  class Month_to_Day(Operators.Unary):
@@ -1099,11 +1125,5 @@ class Month_to_Day(Operators.Unary):
1099
1125
 
1100
1126
  @classmethod
1101
1127
  def py_op(cls, value: str) -> int:
1102
- if "/" in value:
1103
- raise SemanticError("2-1-19-11", op=cls.op)
1104
- if "M" not in value:
1105
- raise SemanticError("2-1-19-16", op=cls.op)
1106
- index_m = value.index("M")
1107
- months = int(value[1:index_m])
1108
- days = int(value[(index_m + 1): -1])
1109
- return months * 30 + days
1128
+ days = Duration.to_days(value)
1129
+ return days
@@ -4,7 +4,13 @@ from typing import Any, Dict, Optional
4
4
  import pandas as pd
5
5
 
6
6
  from vtlengine.AST.Grammar.tokens import CHECK, CHECK_HIERARCHY
7
- from vtlengine.DataTypes import Boolean, Integer, Number, String, check_unary_implicit_promotion
7
+ from vtlengine.DataTypes import (
8
+ Boolean,
9
+ Integer,
10
+ Number,
11
+ String,
12
+ check_unary_implicit_promotion,
13
+ )
8
14
  from vtlengine.Exceptions import SemanticError
9
15
  from vtlengine.Model import Component, Dataset, Role
10
16
  from vtlengine.Operators import Operator
@@ -103,7 +109,6 @@ class Check(Operator):
103
109
 
104
110
  # noinspection PyTypeChecker
105
111
  class Validation(Operator):
106
-
107
112
  @classmethod
108
113
  def _generate_result_data(cls, rule_info: Dict[str, Any]) -> pd.DataFrame:
109
114
  rule_list_df = []
@@ -171,7 +176,6 @@ class Validation(Operator):
171
176
  elif output == "all":
172
177
  result.data = result.data[result.get_identifiers_names() + validation_measures]
173
178
  else: # output == 'all_measures'
174
-
175
179
  result.data = result.data[
176
180
  result.get_identifiers_names()
177
181
  + dataset_element.get_measures_names()
@@ -230,7 +234,9 @@ class Check_Hierarchy(Validation):
230
234
  )
231
235
  if dataset.components[component_name].role != Role.IDENTIFIER:
232
236
  raise SemanticError(
233
- "1-3-20", name=component_name, role=dataset.components[component_name].role.value
237
+ "1-3-20",
238
+ name=component_name,
239
+ role=dataset.components[component_name].role.value,
234
240
  )
235
241
  # Remove attributes from dataset
236
242
  if len(dataset.get_attributes()) > 0: