vtlengine 1.0.3rc3__py3-none-any.whl → 1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (53)
  1. vtlengine/API/_InternalApi.py +288 -61
  2. vtlengine/API/__init__.py +269 -71
  3. vtlengine/API/data/schema/json_schema_2.1.json +116 -0
  4. vtlengine/AST/ASTComment.py +56 -0
  5. vtlengine/AST/ASTConstructor.py +76 -22
  6. vtlengine/AST/ASTConstructorModules/Expr.py +238 -120
  7. vtlengine/AST/ASTConstructorModules/ExprComponents.py +126 -61
  8. vtlengine/AST/ASTConstructorModules/Terminals.py +97 -42
  9. vtlengine/AST/ASTConstructorModules/__init__.py +50 -0
  10. vtlengine/AST/ASTEncoders.py +5 -1
  11. vtlengine/AST/ASTString.py +608 -0
  12. vtlengine/AST/ASTTemplate.py +28 -2
  13. vtlengine/AST/DAG/__init__.py +10 -4
  14. vtlengine/AST/Grammar/lexer.py +0 -1
  15. vtlengine/AST/Grammar/parser.py +185 -440
  16. vtlengine/AST/VtlVisitor.py +0 -1
  17. vtlengine/AST/__init__.py +127 -14
  18. vtlengine/DataTypes/TimeHandling.py +50 -15
  19. vtlengine/DataTypes/__init__.py +79 -7
  20. vtlengine/Exceptions/__init__.py +3 -5
  21. vtlengine/Exceptions/messages.py +74 -105
  22. vtlengine/Interpreter/__init__.py +136 -46
  23. vtlengine/Model/__init__.py +14 -11
  24. vtlengine/Operators/Aggregation.py +17 -9
  25. vtlengine/Operators/Analytic.py +64 -20
  26. vtlengine/Operators/Assignment.py +0 -1
  27. vtlengine/Operators/CastOperator.py +44 -44
  28. vtlengine/Operators/Clause.py +16 -10
  29. vtlengine/Operators/Comparison.py +20 -12
  30. vtlengine/Operators/Conditional.py +47 -15
  31. vtlengine/Operators/General.py +9 -4
  32. vtlengine/Operators/HROperators.py +4 -14
  33. vtlengine/Operators/Join.py +15 -14
  34. vtlengine/Operators/Numeric.py +32 -26
  35. vtlengine/Operators/RoleSetter.py +6 -2
  36. vtlengine/Operators/Set.py +12 -8
  37. vtlengine/Operators/String.py +9 -9
  38. vtlengine/Operators/Time.py +145 -124
  39. vtlengine/Operators/Validation.py +10 -4
  40. vtlengine/Operators/__init__.py +56 -69
  41. vtlengine/Utils/__init__.py +55 -1
  42. vtlengine/__extras_check.py +17 -0
  43. vtlengine/__init__.py +2 -2
  44. vtlengine/files/output/__init__.py +2 -1
  45. vtlengine/files/output/_time_period_representation.py +2 -1
  46. vtlengine/files/parser/__init__.py +52 -46
  47. vtlengine/files/parser/_time_checking.py +4 -4
  48. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1.dist-info}/METADATA +21 -17
  49. vtlengine-1.1.dist-info/RECORD +61 -0
  50. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1.dist-info}/WHEEL +1 -1
  51. vtlengine/DataTypes/NumericTypesHandling.py +0 -38
  52. vtlengine-1.0.3rc3.dist-info/RECORD +0 -58
  53. {vtlengine-1.0.3rc3.dist-info → vtlengine-1.1.dist-info}/LICENSE.md +0 -0
vtlengine/AST/VtlVisitor.py CHANGED
@@ -6,7 +6,6 @@ from vtlengine.AST.Grammar.parser import Parser
 
 
 class VtlVisitor(ParseTreeVisitor):
-
     # Visit a parse tree produced by Parser#start.
     def visitStart(self, ctx: Parser.StartContext):
         return self.visitChildren(ctx)
vtlengine/AST/__init__.py CHANGED
@@ -11,7 +11,7 @@ from dataclasses import dataclass
 from typing import Any, Dict, List, Optional, Type, Union
 
 from vtlengine.DataTypes import ScalarType
-from vtlengine.Model import Role
+from vtlengine.Model import Dataset, Role
 
 
 @dataclass
@@ -20,6 +20,11 @@ class AST:
     AST: (children)
     """
 
+    line_start: int
+    column_start: int
+    line_stop: int
+    column_stop: int
+
     @classmethod
     def __all_annotations(cls) -> Dict[str, Any]:
         class_attributes = {}
@@ -47,6 +52,30 @@ class AST:
 
     __repr__ = __str__
 
+    def ast_equality(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        for k in self.__all_annotations():
+            if (
+                getattr(self, k) != getattr(other, k)
+                and k not in AST.__annotations__
+                and k != "children"  # We do not want to compare the children order here
+            ):
+                return False
+        return True
+
+    __eq__ = ast_equality
+
+
+@dataclass
+class Comment(AST):
+    """
+    Comment: (value)
+    """
+
+    value: str
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Start(AST):
@@ -56,6 +85,8 @@ class Start(AST):
 
     children: List[AST]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Assignment(AST):
@@ -67,6 +98,8 @@ class Assignment(AST):
     op: str
     right: AST
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class PersistentAssignment(Assignment):
@@ -85,7 +118,9 @@ class VarID(AST):
     Could be: DATASET or a COMPONENT.
     """
 
-    value: Any
+    value: str
+
+    __eq__ = AST.ast_equality
 
 
 @dataclass
@@ -100,6 +135,8 @@ class UnaryOp(AST):
     op: str
     operand: AST
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class BinOp(AST):
@@ -113,6 +150,8 @@ class BinOp(AST):
     op: str
     right: AST
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class MulOp(AST):
@@ -124,6 +163,8 @@ class MulOp(AST):
     op: str
     children: List[AST]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class ParamOp(AST):
@@ -137,12 +178,16 @@ class ParamOp(AST):
     children: List[AST]
     params: List[AST]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class UDOCall(AST):
     op: str
     params: List[AST]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class JoinOp(AST):
@@ -156,9 +201,11 @@ class JoinOp(AST):
 
     op: str
     clauses: List[AST]
-    using: Optional[List[AST]]
+    using: Optional[List[str]]
     isLast: bool = False
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Constant(AST):
@@ -172,6 +219,8 @@ class Constant(AST):
     type_: str
     value: Optional[Union[str, int, float, bool]]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class ParamConstant(Constant):
@@ -184,6 +233,8 @@ class ParamConstant(Constant):
     type_: str
     value: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Identifier(AST):
@@ -194,6 +245,8 @@ class Identifier(AST):
     value: str
     kind: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class ID(AST):
@@ -206,6 +259,8 @@ class ID(AST):
     type_: str
     value: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Collection(AST):
@@ -220,6 +275,8 @@ class Collection(AST):
     children: List[AST]
     kind: str = "Set"
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Windowing(AST):
@@ -234,11 +291,13 @@ class Windowing(AST):
     """
 
     type_: str
-    start: str
+    start: Union[int, str]
     start_mode: str
     stop: Union[int, str]
     stop_mode: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class OrderBy(AST):
@@ -249,6 +308,8 @@ class OrderBy(AST):
        if self.order not in ["asc", "desc"]:
            raise ValueError(f"Invalid order: {self.order}")
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Analytic(AST):
@@ -274,6 +335,8 @@ class Analytic(AST):
        if self.partition_by is None and self.order_by is None:
            raise ValueError("Partition by or order by must be provided on Analytic.")
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class RegularAggregation(AST):
@@ -288,6 +351,8 @@ class RegularAggregation(AST):
     dataset: Optional[AST] = None
     isLast: bool = False
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class RenameNode(AST):
@@ -298,6 +363,8 @@ class RenameNode(AST):
     old_name: str
     new_name: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Aggregation(AST):
@@ -316,6 +383,8 @@ class Aggregation(AST):
     grouping: Optional[List[AST]] = None
     having_clause: Optional[AST] = None
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class TimeAggregation(AST):
@@ -331,6 +400,8 @@ class TimeAggregation(AST):
     operand: Optional[AST] = None
     conf: Optional[str] = None
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class If(AST):
@@ -342,15 +413,14 @@ class If(AST):
     thenOp: AST
     elseOp: AST
 
+    __eq__ = AST.ast_equality
+
 
-class CaseObj:
+@dataclass
+class CaseObj(AST):
     condition: AST
     thenOp: AST
 
-    def __init__(self, condition: AST, thenOp: AST):
-        self.condition = condition
-        self.thenOp = thenOp
-
 
 @dataclass
 class Case(AST):
@@ -361,6 +431,8 @@ class Case(AST):
     cases: List[CaseObj]
     elseOp: AST
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Validation(AST):
@@ -375,6 +447,8 @@ class Validation(AST):
     imbalance: Optional[AST]
     invalid: bool
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class ComponentType(AST):
@@ -386,6 +460,8 @@ class ComponentType(AST):
     data_type: Optional[Type[ScalarType]] = None
     role: Optional[Role] = None
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class ASTScalarType(AST):
@@ -400,6 +476,8 @@ class DatasetType(AST):
 
     components: List[ComponentType]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Types(AST):
@@ -418,6 +496,8 @@ class Types(AST):
     nullable: Optional[bool]
     name: Optional[str] = None
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Argument(AST):
@@ -429,6 +509,8 @@ class Argument(AST):
     type_: Type[ScalarType]
     default: Optional[AST]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class Operator(AST):
@@ -441,6 +523,8 @@ class Operator(AST):
     output_type: str
     expression: AST
 
+    __eq__ = AST.ast_equality
+
 
 # TODO: Is this class necessary?
 @dataclass
@@ -452,6 +536,8 @@ class DefIdentifier(AST):
     value: str
     kind: str
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class DPRIdentifier(AST):
@@ -463,6 +549,8 @@ class DPRIdentifier(AST):
     kind: str
     alias: Optional[str] = None
 
+    __eq__ = AST.ast_equality
+
 
 # TODO: Are HRBinOp and HRUnOp necessary?
 @dataclass
@@ -476,6 +564,8 @@ class HRBinOp(AST):
     op: str
     right: DefIdentifier
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class HRUnOp(AST):
@@ -487,6 +577,8 @@ class HRUnOp(AST):
     op: str
     operand: DefIdentifier
 
+    __eq__ = AST.ast_equality
+
 
 # TODO: Unify HRule and DPRule?
 @dataclass
@@ -497,8 +589,10 @@ class HRule(AST):
 
     name: Optional[str]
     rule: HRBinOp
-    erCode: Optional[Constant]
-    erLevel: Optional[Constant]
+    erCode: Optional[str]
+    erLevel: Optional[int]
+
+    __eq__ = AST.ast_equality
 
 
 @dataclass
@@ -509,8 +603,10 @@ class DPRule(AST):
 
     name: Optional[str]
     rule: HRBinOp
-    erCode: Optional[Constant]
-    erLevel: Optional[Constant]
+    erCode: Optional[str]
+    erLevel: Optional[int]
+
+    __eq__ = AST.ast_equality
 
 
 # TODO: Unify HRuleset and DPRuleset?
@@ -525,6 +621,8 @@ class HRuleset(AST):
     element: DefIdentifier
     rules: List[HRule]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class DPRuleset(AST):
@@ -537,6 +635,8 @@ class DPRuleset(AST):
     params: Union[DefIdentifier, list]
     rules: List[DPRule]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class EvalOp(AST):
@@ -548,9 +648,11 @@ class EvalOp(AST):
 
     name: str
     operands: List[AST]
-    output: Optional[str]
+    output: Optional[Dataset]
     language: Optional[str]
 
+    __eq__ = AST.ast_equality
+
 
 @dataclass
 class NoOp(AST):
@@ -559,3 +661,14 @@ class NoOp(AST):
     """
 
     pass
+
+
+@dataclass
+class ParFunction(AST):
+    """
+    ParFunction: (operand)
+    """
+
+    operand: AST
+
+    __eq__ = AST.ast_equality
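The two notable changes in this module are the positional attributes (line_start, column_start, line_stop, column_stop) added to the AST base class and the structural ast_equality comparison that every node now installs as __eq__. A minimal sketch of the resulting behaviour (it assumes vtlengine 1.1 is installed and uses VarID only as a convenient example node):

from vtlengine.AST import VarID

# Two nodes that differ only in where they appeared in the source script.
a = VarID(line_start=1, column_start=0, line_stop=1, column_stop=4, value="DS_1")
b = VarID(line_start=7, column_start=2, line_stop=7, column_stop=6, value="DS_1")
c = VarID(line_start=1, column_start=0, line_stop=1, column_stop=4, value="DS_2")

# ast_equality skips the positional attributes declared on the AST base class
# (and the children ordering), so only the node's own payload is compared.
assert a == b
assert a != c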
vtlengine/DataTypes/TimeHandling.py CHANGED
@@ -9,9 +9,9 @@ import pandas as pd
 
 from vtlengine.Exceptions import SemanticError
 
-DURATION_MAPPING = {"A": 6, "S": 5, "Q": 4, "M": 3, "W": 2, "D": 1}
+PERIOD_IND_MAPPING = {"A": 6, "S": 5, "Q": 4, "M": 3, "W": 2, "D": 1}
 
-DURATION_MAPPING_REVERSED = {6: "A", 5: "S", 4: "Q", 3: "M", 2: "W", 1: "D"}
+PERIOD_IND_MAPPING_REVERSE = {6: "A", 5: "S", 4: "Q", 3: "M", 2: "W", 1: "D"}
 
 PERIOD_INDICATORS = ["A", "S", "Q", "M", "W", "D"]
 
@@ -32,15 +32,16 @@ def date_to_period(date_value: date, period_indicator: str) -> Any:
     return TimePeriodHandler(f"{date_value.year}D{date_value.timetuple().tm_yday}")
 
 
-def period_to_date(year: int,
-                   period_indicator: str,
-                   period_number: int,
-                   start: bool = False
-                   ) -> date:
+def period_to_date(
+    year: int, period_indicator: str, period_number: int, start: bool = False
+) -> date:
     if period_indicator == "A":
         return date(year, 1, 1) if start else date(year, 12, 31)
     periods = {
-        "S": [(date(year, 1, 1), date(year, 6, 30)), (date(year, 7, 1), date(year, 12, 31))],
+        "S": [
+            (date(year, 1, 1), date(year, 6, 30)),
+            (date(year, 7, 1), date(year, 12, 31)),
+        ],
         "Q": [
             (date(year, 1, 1), date(year, 3, 31)),
             (date(year, 4, 1), date(year, 6, 30)),
@@ -202,6 +203,10 @@ class TimePeriodHandler:
             raise SemanticError("2-1-19-2", period=value)
         self._period_indicator = value
 
+    @property
+    def period_magnitude(self) -> int:
+        return PERIOD_IND_MAPPING[self.period_indicator]
+
     @property
     def period_number(self) -> int:
         return self._period_number
@@ -229,16 +234,46 @@ class TimePeriodHandler:
         # raise ValueError(f'Invalid day {value} for year {self.year}.')
         self._period_number = value
 
+    @property
+    def period_dates(self) -> tuple[date, date]:
+        return (
+            period_to_date(self.year, self.period_indicator, self.period_number, start=True),
+            period_to_date(self.year, self.period_indicator, self.period_number, start=False),
+        )
+
     def _meta_comparison(self, other: Any, py_op: Any) -> Optional[bool]:
         if pd.isnull(other):
             return None
+
+        if py_op in (operator.eq, operator.ne):
+            return py_op(str(self), str(other))
+
+        if py_op in (operator.ge, operator.le) and str(self) == str(other):
+            return True
+
         if isinstance(other, str):
-            if len(other) == 0:
-                return False
             other = TimePeriodHandler(other)
-        return py_op(
-            DURATION_MAPPING[self.period_indicator], DURATION_MAPPING[other.period_indicator]
-        )
+
+        self_lapse, other_lapse = self.period_dates, other.period_dates
+        is_lt_or_le = py_op in [operator.lt, operator.le]
+        is_gt_or_ge = py_op in [operator.gt, operator.ge]
+
+        if is_lt_or_le or is_gt_or_ge:
+            idx = 0 if is_lt_or_le else 1
+            if self_lapse[idx] != other_lapse[idx]:
+                return (
+                    self_lapse[idx] < other_lapse[idx]
+                    if is_lt_or_le
+                    else self_lapse[idx] > other_lapse[idx]
+                )
+            if self.period_magnitude != other.period_magnitude:
+                return (
+                    self.period_magnitude < other.period_magnitude
+                    if is_lt_or_le
+                    else self.period_magnitude > other.period_magnitude
+                )
+
+        return False
 
     def start_date(self, as_date: bool = False) -> Union[date, str]:
         """
@@ -453,7 +488,7 @@ def shift_period(x: TimePeriodHandler, shift_param: int) -> TimePeriodHandler:
 def sort_time_period(series: Any) -> Any:
     values_sorted = sorted(
         series.to_list(),
-        key=lambda s: (s.year, DURATION_MAPPING[s.period_indicator], s.period_number),
+        key=lambda s: (s.year, PERIOD_IND_MAPPING[s.period_indicator], s.period_number),
     )
     return pd.Series(values_sorted, name=series.name)
 
@@ -478,7 +513,7 @@ def generate_period_range(
 
 
 def check_max_date(str_: Optional[str]) -> Optional[str]:
-    if pd.isnull(str_) or str_ == "nan" or str_ == "NaT":
+    if pd.isnull(str_) or str_ == "nan" or str_ == "NaT" or str_ is None:
        return None
 
     if len(str_) == 9 and str_[7] == "-":
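The rewritten _meta_comparison changes how time periods are ordered: equality compares the string representations, "<"/"<=" compare the periods' start dates, ">"/">=" compare their end dates, and ties fall back to the period magnitude (D=1 up to A=6). A standalone sketch of that rule (not the library's own API), using plain dates:

from datetime import date

# (start, end, magnitude) as period_dates and period_magnitude would report
# them for 2022-Q1 and 2022-M1.
q1 = (date(2022, 1, 1), date(2022, 3, 31), 4)  # quarter
m1 = (date(2022, 1, 1), date(2022, 1, 31), 3)  # month

def lt(a, b):
    # "<" compares start dates first and falls back to the magnitude on a tie.
    if a[0] != b[0]:
        return a[0] < b[0]
    if a[2] != b[2]:
        return a[2] < b[2]
    return False

print(lt(m1, q1))  # True: same start date, but a month is the smaller period
print(lt(q1, m1))  # False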
vtlengine/DataTypes/__init__.py CHANGED
@@ -1,8 +1,13 @@
+import re
 from typing import Any, Dict, Optional, Set, Type, Union
 
 import pandas as pd
 
-from vtlengine.DataTypes.TimeHandling import check_max_date, date_to_period_str, str_period_to_date
+from vtlengine.DataTypes.TimeHandling import (
+    check_max_date,
+    date_to_period_str,
+    str_period_to_date,
+)
 from vtlengine.Exceptions import SemanticError
 
 DTYPE_MAPPING: Dict[str, str] = {
@@ -397,10 +402,19 @@ class TimePeriod(TimeInterval):
 
 
 class Duration(ScalarType):
+    iso8601_duration_pattern = r"^P((\d+Y)?(\d+M)?(\d+D)?)$"
+
+    @classmethod
+    def validate_duration(cls, value: Any) -> bool:
+        try:
+            match = re.match(cls.iso8601_duration_pattern, value)
+            return bool(match)
+        except Exception:
+            raise Exception("Must be valid")
 
     @classmethod
     def implicit_cast(cls, value: Any, from_type: Any) -> str:
-        if from_type in {Duration, String}:
+        if from_type == String and cls.validate_duration(value):
             return value
 
         raise SemanticError(
@@ -412,7 +426,7 @@ class Duration(ScalarType):
 
     @classmethod
     def explicit_cast(cls, value: Any, from_type: Any) -> Any:
-        if from_type == String:
+        if from_type == String and cls.validate_duration(value):
             return value
 
         raise SemanticError(
@@ -422,6 +436,31 @@ class Duration(ScalarType):
             type_2=SCALAR_TYPES_CLASS_REVERSE[cls],
         )
 
+    @classmethod
+    def to_days(cls, value: Any) -> int:
+        if not cls.validate_duration(value):
+            raise SemanticError(
+                "2-1-19-15", "{op} can only be applied according to the iso 8601 format mask"
+            )
+
+        match = re.match(cls.iso8601_duration_pattern, value)
+
+        years = 0
+        months = 0
+        days = 0
+
+        years_str = match.group(2)  # type: ignore[union-attr]
+        months_str = match.group(3)  # type: ignore[union-attr]
+        days_str = match.group(4)  # type: ignore[union-attr]
+        if years_str:
+            years = int(years_str[:-1])
+        if months_str:
+            months = int(months_str[:-1])
+        if days_str:
+            days = int(days_str[:-1])
+        total_days = years * 365 + months * 30 + days
+        return int(total_days)
+
 
 class Boolean(ScalarType):
     """ """
@@ -549,7 +588,17 @@ IMPLICIT_TYPE_PROMOTION_MAPPING: Dict[Type[ScalarType], Any] = {
     TimePeriod: {TimeInterval, TimePeriod},
     Duration: {Duration},
     Boolean: {String, Boolean},
-    Null: {String, Number, Integer, TimeInterval, Date, TimePeriod, Duration, Boolean, Null},
+    Null: {
+        String,
+        Number,
+        Integer,
+        TimeInterval,
+        Date,
+        TimePeriod,
+        Duration,
+        Boolean,
+        Null,
+    },
 }
 
 # TODO: Implicit are valid as cast without mask
@@ -564,7 +613,17 @@ EXPLICIT_WITHOUT_MASK_TYPE_PROMOTION_MAPPING: Dict[Type[ScalarType], Any] = {
     TimePeriod: {TimePeriod, String},
     Duration: {Duration, String},
     Boolean: {Integer, Number, String, Boolean},
-    Null: {String, Number, Integer, TimeInterval, Date, TimePeriod, Duration, Boolean, Null},
+    Null: {
+        String,
+        Number,
+        Integer,
+        TimeInterval,
+        Date,
+        TimePeriod,
+        Duration,
+        Boolean,
+        Null,
+    },
 }
 
 EXPLICIT_WITH_MASK_TYPE_PROMOTION_MAPPING: Dict[Type[ScalarType], Any] = {
@@ -576,7 +635,17 @@ EXPLICIT_WITH_MASK_TYPE_PROMOTION_MAPPING: Dict[Type[ScalarType], Any] = {
     TimePeriod: {Date},
     Duration: {String},
     Boolean: {},
-    Null: {String, Number, Integer, TimeInterval, Date, TimePeriod, Duration, Boolean, Null},
+    Null: {
+        String,
+        Number,
+        Integer,
+        TimeInterval,
+        Date,
+        TimePeriod,
+        Duration,
+        Boolean,
+        Null,
+    },
 }
 
 
@@ -634,7 +703,10 @@ def binary_implicit_promotion(
 
 
 def check_binary_implicit_promotion(
-    left: Type[ScalarType], right: Any, type_to_check: Any = None, return_type: Any = None
+    left: Type[ScalarType],
+    right: Any,
+    type_to_check: Any = None,
+    return_type: Any = None,
 ) -> bool:
     """
     Validates the compatibility between the types of the operands and the operator
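Duration values are now validated against a restricted ISO 8601 mask (P[nY][nM][nD]) before any cast, and to_days converts them with the fixed factors shown above (365 days per year, 30 per month). A minimal sketch, assuming vtlengine 1.1 is installed:

from vtlengine.DataTypes import Duration

print(Duration.validate_duration("P1Y2M10D"))  # True
print(Duration.validate_duration("P1W"))       # False: weeks are not part of the mask
print(Duration.to_days("P1Y2M10D"))            # 435 == 1 * 365 + 2 * 30 + 10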
vtlengine/Exceptions/__init__.py CHANGED
@@ -124,7 +124,7 @@ class InputValidationException(VTLEngineException):
         lino: Optional[str] = None,
         colno: Optional[str] = None,
         code: Optional[str] = None,
-        **kwargs: Any
+        **kwargs: Any,
     ) -> None:
         if code is not None:
             message = centralised_messages[code].format(**kwargs)
@@ -143,7 +143,7 @@ def check_key(field: str, dict_keys: Any, key: str) -> None:
 def find_closest_key(dict_keys: Any, key: str) -> Optional[str]:
     closest_key = None
     max_distance = 3
-    min_distance = float('inf')
+    min_distance = float("inf")
 
     for dict_key in dict_keys:
         distance = key_distance(key, dict_key)
@@ -167,8 +167,6 @@ def key_distance(key: str, objetive: str) -> int:
     for i in range(1, len(key) + 1):
         for j in range(1, len(objetive) + 1):
             cost = 0 if key[i - 1] == objetive[j - 1] else 1
-            dp[i][j] = min(dp[i - 1][j] + 1,
-                           dp[i][j - 1] + 1,
-                           dp[i - 1][j - 1] + cost)
+            dp[i][j] = min(dp[i - 1][j] + 1, dp[i][j - 1] + 1, dp[i - 1][j - 1] + cost)
 
     return dp[-1][-1]
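key_distance is a plain Levenshtein edit distance; find_closest_key uses it (presumably capped by the max_distance of 3 set above) to suggest the nearest known key when a lookup fails. A small sketch, assuming vtlengine 1.1 is installed:

from vtlengine.Exceptions import find_closest_key, key_distance

print(key_distance("identifer", "identifier"))                   # 1: one missing letter
print(find_closest_key(["identifier", "measure"], "identifer"))  # expected: "identifier"
print(find_closest_key(["identifier", "measure"], "banana"))     # expected: None (too far)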