pytrilogy 0.0.2.49__py3-none-any.whl → 0.0.2.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release has been flagged as potentially problematic; see the registry page for details.
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/METADATA +1 -1
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/RECORD +38 -38
- trilogy/__init__.py +1 -1
- trilogy/core/enums.py +11 -0
- trilogy/core/functions.py +4 -1
- trilogy/core/models.py +11 -0
- trilogy/core/processing/concept_strategies_v3.py +0 -3
- trilogy/core/processing/node_generators/common.py +0 -2
- trilogy/core/processing/node_generators/filter_node.py +0 -3
- trilogy/core/processing/node_generators/group_node.py +0 -1
- trilogy/core/processing/node_generators/group_to_node.py +0 -2
- trilogy/core/processing/node_generators/multiselect_node.py +0 -2
- trilogy/core/processing/node_generators/node_merge_node.py +0 -1
- trilogy/core/processing/node_generators/rowset_node.py +0 -1
- trilogy/core/processing/node_generators/select_merge_node.py +138 -59
- trilogy/core/processing/node_generators/union_node.py +0 -1
- trilogy/core/processing/node_generators/unnest_node.py +0 -2
- trilogy/core/processing/node_generators/window_node.py +0 -2
- trilogy/core/processing/nodes/base_node.py +0 -3
- trilogy/core/processing/nodes/filter_node.py +0 -3
- trilogy/core/processing/nodes/group_node.py +0 -3
- trilogy/core/processing/nodes/merge_node.py +0 -3
- trilogy/core/processing/nodes/select_node_v2.py +0 -4
- trilogy/core/processing/nodes/union_node.py +0 -3
- trilogy/core/processing/nodes/unnest_node.py +0 -3
- trilogy/core/processing/nodes/window_node.py +0 -3
- trilogy/core/processing/utility.py +3 -0
- trilogy/core/query_processor.py +0 -1
- trilogy/dialect/base.py +14 -2
- trilogy/dialect/duckdb.py +7 -0
- trilogy/hooks/graph_hook.py +14 -0
- trilogy/parsing/common.py +14 -5
- trilogy/parsing/parse_engine.py +31 -0
- trilogy/parsing/trilogy.lark +3 -1
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/LICENSE.md +0 -0
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/WHEEL +0 -0
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/entry_points.txt +0 -0
- {pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/top_level.txt +0 -0
{pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-trilogy/__init__.py,sha256=
+trilogy/__init__.py,sha256=IN9QOl63ICFDvmAtr97Xo-KPeIdHaN9JuYuIIsxV0Mk,291
 trilogy/compiler.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/constants.py,sha256=qZ1d0hoKPPV2HHCoFwPYTVB7b6bXjpWvXd3lE-zEhy8,1494
 trilogy/engine.py,sha256=yOPnR7XCjWG82Gym_LLZBkYKKJdLCvqdCyt8zguNcnM,1103
@@ -8,55 +8,55 @@ trilogy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/utility.py,sha256=eguES83XhmSOAQSBu5xq4aAXimiZFrxcUu81zDL22ug,707
 trilogy/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/core/constants.py,sha256=7XaCpZn5mQmjTobbeBn56SzPWq9eMNDfzfsRU-fP0VE,171
-trilogy/core/enums.py,sha256=
+trilogy/core/enums.py,sha256=6pGjEXNJPB1ngbDQRJjxRi4NmKM8NZQ5-iwnZhrdo5U,7281
 trilogy/core/env_processor.py,sha256=Pt4lmJfbShBbeSe5M7_FrTk5krrOziiAA__Slnettvc,2585
 trilogy/core/environment_helpers.py,sha256=CSmQyEXE6EZ4XFYuQQITUHuWXxXGo9AL4UsTnu0404A,7159
 trilogy/core/ergonomics.py,sha256=ASLDd0RqKWrZiG3XcKHo8nyTjaB_8xfE9t4NZ1UvGpc,1639
 trilogy/core/exceptions.py,sha256=1c1lQCwSw4_5CQS3q7scOkXU8GQvullJXfPHubprl90,617
-trilogy/core/functions.py,sha256=
+trilogy/core/functions.py,sha256=hDlwLxQUskT9iRcIic1lfACQnxMLNM5ASdHRPi0ghyw,10835
 trilogy/core/graph_models.py,sha256=mameUTiuCajtihDw_2-W218xyJlvTusOWrEKP1yAWgk,2003
 trilogy/core/internal.py,sha256=-CykZknaWieFh5begaQJ4EgGP9qJccGg4XXdmBirxEc,1074
-trilogy/core/models.py,sha256=
+trilogy/core/models.py,sha256=WE75DKSyqx-hFUxIuc4oPlq1VV4eyN-pLeHuKfhlsWc,166832
 trilogy/core/optimization.py,sha256=Jy3tVJNeqhpK6VSyTvgIWKCao6y-VCZ7mYA69MIF6L0,7989
-trilogy/core/query_processor.py,sha256=
+trilogy/core/query_processor.py,sha256=V-TqybYO0kCY8O7Nk58OBhb7_eRPs_EqAwaQv-EYLSY,18615
 trilogy/core/optimizations/__init__.py,sha256=EBanqTXEzf1ZEYjAneIWoIcxtMDite5-n2dQ5xcfUtg,356
 trilogy/core/optimizations/base_optimization.py,sha256=P4kF-eCXkBxO-5c6tLHhMZ4ODRH1A04hb_6ovkaVyLw,505
 trilogy/core/optimizations/inline_constant.py,sha256=c-YHOg6eAufL4EaCf4-0PbY_D4skBHW0ldR55_phsMA,1277
 trilogy/core/optimizations/inline_datasource.py,sha256=LsngRKBy-LYcx1sfo1-rnDym_ly73YV9WkEngSjpFx8,3943
 trilogy/core/optimizations/predicate_pushdown.py,sha256=XPWEBv8jXnc0OL2JDPNwFvJ5AtOE7dLzJK0LzdmdZMo,9252
 trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-trilogy/core/processing/concept_strategies_v3.py,sha256=
+trilogy/core/processing/concept_strategies_v3.py,sha256=Uxi9OMg52OLrYNW76SAYXIvnI9UFAFauao34ZO7uD3o,37053
 trilogy/core/processing/graph_utils.py,sha256=stbYnDxnK-1kbo9L4XNU85FQhWCP-oZYO7LCXhAdC5M,1198
-trilogy/core/processing/utility.py,sha256=
+trilogy/core/processing/utility.py,sha256=STqSHP8fWTVmaIUCfHAb9Hke_fzOG2pTbmWIdYS4cvc,18787
 trilogy/core/processing/node_generators/__init__.py,sha256=s_YV1OYc336DuS9591259qjI_K_CtOCuhkf4t2aOgYs,733
 trilogy/core/processing/node_generators/basic_node.py,sha256=VqVyb4wXI_B2OmfwtpsypimzcevoPe_pnstlKLU3S5s,2878
-trilogy/core/processing/node_generators/common.py,sha256=
-trilogy/core/processing/node_generators/filter_node.py,sha256=
-trilogy/core/processing/node_generators/group_node.py,sha256=
-trilogy/core/processing/node_generators/group_to_node.py,sha256=
-trilogy/core/processing/node_generators/multiselect_node.py,sha256=
-trilogy/core/processing/node_generators/node_merge_node.py,sha256=
-trilogy/core/processing/node_generators/rowset_node.py,sha256=
-trilogy/core/processing/node_generators/select_merge_node.py,sha256=
+trilogy/core/processing/node_generators/common.py,sha256=4rFEBWUpZ01WcdQEi_8fa9QbyxHIiQ781W4nuhVLaxc,8881
+trilogy/core/processing/node_generators/filter_node.py,sha256=2ucE8shC3gyKjPilkfkA0FW0ZHhfcGy2wBQfquuHNlM,7548
+trilogy/core/processing/node_generators/group_node.py,sha256=dD2qlFzLRxYni9_1fHbewoe8AzKExyrDJRfeTwy7XQ4,4863
+trilogy/core/processing/node_generators/group_to_node.py,sha256=8ToptIWQoJttquEPrRTMvU33jCJQI-VJxVObN8W8QJk,2511
+trilogy/core/processing/node_generators/multiselect_node.py,sha256=jOaSOX80tprgYAG9XQ8lL93lt8QbdoAgFhTqF6lgdPY,6484
+trilogy/core/processing/node_generators/node_merge_node.py,sha256=p0NrUxXVsQSoFuP2JCmNZg4muaQV7TXwW11ECEe7pjA,13988
+trilogy/core/processing/node_generators/rowset_node.py,sha256=a9FxQb1qobeewrRNd-X_fFJ_aRLP1WcytK5-Bgu6wjs,5284
+trilogy/core/processing/node_generators/select_merge_node.py,sha256=7jp8byYFoeglBinXtQLKrGoJ6VEQPDCR4R7lMbIh634,15288
 trilogy/core/processing/node_generators/select_node.py,sha256=bjTylBa-vYbmzpuSpphmIo_Oi78YZpI8ppHnN9KDYDk,1795
-trilogy/core/processing/node_generators/union_node.py,sha256=
-trilogy/core/processing/node_generators/unnest_node.py,sha256=
-trilogy/core/processing/node_generators/window_node.py,sha256=
+trilogy/core/processing/node_generators/union_node.py,sha256=MfJjF2m0ARl0oUH9QT1awzPv0e3yA3mXK1XqAvUTgKw,2504
+trilogy/core/processing/node_generators/unnest_node.py,sha256=8El2B1mzC9vIUSk-m94xHvaJwAf5GtCAGfTxGDSiqmU,2229
+trilogy/core/processing/node_generators/window_node.py,sha256=5htRRxaxw6EnS-2TVoQIiy4bkNSoBefBpj2DVBtBo-w,3484
 trilogy/core/processing/nodes/__init__.py,sha256=WNUmYmZF3uqF2qiJ1L7y0u9qiVD9YnluKds0wA5opJE,4813
-trilogy/core/processing/nodes/base_node.py,sha256=
-trilogy/core/processing/nodes/filter_node.py,sha256=
-trilogy/core/processing/nodes/group_node.py,sha256=
-trilogy/core/processing/nodes/merge_node.py,sha256=
-trilogy/core/processing/nodes/select_node_v2.py,sha256=
-trilogy/core/processing/nodes/union_node.py,sha256=
-trilogy/core/processing/nodes/unnest_node.py,sha256=
-trilogy/core/processing/nodes/window_node.py,sha256=
+trilogy/core/processing/nodes/base_node.py,sha256=izspnhnzyGKF1KuUpAQmZByxE85bPrrrUj3Q18EDwxM,16515
+trilogy/core/processing/nodes/filter_node.py,sha256=j7icDAXJ7oFPkHTOQVmm9QbZxrhhYEUGJj2lSiguXKA,2292
+trilogy/core/processing/nodes/group_node.py,sha256=g67rsj5DK9-fna0ppVpfuTkwaQzwhKwAur4HNnpNwgU,7438
+trilogy/core/processing/nodes/merge_node.py,sha256=eiqGEvO8UgN-YJ7mlkNlodR4vAGsUJ7d5y2hytvcgDU,14866
+trilogy/core/processing/nodes/select_node_v2.py,sha256=t3ln9Kxeml8mVTnLgtNPvavb5TLTRtfkJ0nyxh7UYUs,8212
+trilogy/core/processing/nodes/union_node.py,sha256=1QgOWkjJ-ADFdanoRzi0EM5buhuzJbmlda9BAUGp4mM,1352
+trilogy/core/processing/nodes/unnest_node.py,sha256=0TFANwqVPaVpUR6SF5uweGTlXfEnagXRBBZU6dUwtcY,2101
+trilogy/core/processing/nodes/window_node.py,sha256=yYwWuOq1Uwm-xEl8lFH_urm-YXaAGAgNhE20MEoD5QQ,1163
 trilogy/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-trilogy/dialect/base.py,sha256=
+trilogy/dialect/base.py,sha256=DR7cHoL5rbRBnsj6PCq5wK8GHH-l5szpKXUaxMqx1Mw,38568
 trilogy/dialect/bigquery.py,sha256=mKC3zoEU232h9RtIXJjqiZ72lWH8a6S28p6wAZKrAfg,2952
 trilogy/dialect/common.py,sha256=b0E6JqdKaaSzThLiFa9jwUg4YnXahf-3bqmzOn5z-6E,3827
 trilogy/dialect/config.py,sha256=UiBY2tBbNk9owx-zxP_3lN9lErEUXhXIU_bcXA18AvU,2992
-trilogy/dialect/duckdb.py,sha256=
+trilogy/dialect/duckdb.py,sha256=O-2k0zaJKnr_McdU6iqBHcufCtHwsIKanAnpBD5o33A,3685
 trilogy/dialect/enums.py,sha256=iaghGgOl6zRr4RxRn4TxRnxZU9iSYJG6hN5wqYiBRNQ,3948
 trilogy/dialect/postgres.py,sha256=VH4EB4myjIeZTHeFU6vK00GxY9c53rCBjg2mLbdaCEE,3254
 trilogy/dialect/presto.py,sha256=y2BMOXvpKh1_cXnpGhG0sjhGP-pNVLkf760Hz_pNw_s,3386
@@ -64,22 +64,22 @@ trilogy/dialect/snowflake.py,sha256=wmao9p26jX5yIX5SC8sRAZTXkPGTvq6ixO693QTfhz8,
 trilogy/dialect/sql_server.py,sha256=7iFpo2xztQ4ZJVwJ5n8kntWreymRzz035iClGZp3Nyc,3117
 trilogy/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/hooks/base_hook.py,sha256=gD6_sjzTzchpLIn3CvJzkM9IvaWNfQUra3yDh9-s8qQ,1125
-trilogy/hooks/graph_hook.py,sha256=
+trilogy/hooks/graph_hook.py,sha256=6YAHlVnak03JtWEox2oAroMdRdN7ETUN7SqwQXhdTdE,2870
 trilogy/hooks/query_debugger.py,sha256=FoDh2bu2NiwLusVhKa5El_l8EKaqfET7zn55GP0TkOE,4644
 trilogy/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/parsing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-trilogy/parsing/common.py,sha256=
+trilogy/parsing/common.py,sha256=Cm8GPznLphvc44K9EVpo_iVBRtWuH0zFMa8ekYfmVBg,10887
 trilogy/parsing/config.py,sha256=Z-DaefdKhPDmSXLgg5V4pebhSB0h590vI0_VtHnlukI,111
 trilogy/parsing/exceptions.py,sha256=92E5i2frv5hj9wxObJZsZqj5T6bglvPzvdvco_vW1Zk,38
 trilogy/parsing/helpers.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-trilogy/parsing/parse_engine.py,sha256=
+trilogy/parsing/parse_engine.py,sha256=BLO79wqweLptVq66VKAHjF68ggE_gd4XFtsWv9HTtGU,68671
 trilogy/parsing/render.py,sha256=Dr0QKIaAUi9uxfZZJVNV-todKoTA-tsWXNXPJ4Ohjn0,15650
-trilogy/parsing/trilogy.lark,sha256=
+trilogy/parsing/trilogy.lark,sha256=EazfEvYPuvkPkNjUnVzFi0uD9baavugbSI8CyfawShk,12573
 trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/scripts/trilogy.py,sha256=DQDW81E5mDMWFP8oPw8q-IyrR2JGxQSDWgUWe2VTSRQ,3731
-pytrilogy-0.0.2.
-pytrilogy-0.0.2.
-pytrilogy-0.0.2.
-pytrilogy-0.0.2.
-pytrilogy-0.0.2.
-pytrilogy-0.0.2.
+pytrilogy-0.0.2.50.dist-info/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
+pytrilogy-0.0.2.50.dist-info/METADATA,sha256=uqgnhuCqk3kTbmMRDbHps-acUl4qTZoGJUMDLhALK4w,8426
+pytrilogy-0.0.2.50.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+pytrilogy-0.0.2.50.dist-info/entry_points.txt,sha256=0petKryjvvtEfTlbZC1AuMFumH_WQ9v8A19LvoS6G6c,54
+pytrilogy-0.0.2.50.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
+pytrilogy-0.0.2.50.dist-info/RECORD,,
trilogy/__init__.py
CHANGED
trilogy/core/enums.py
CHANGED
@@ -120,6 +120,8 @@ class FunctionType(Enum):
 
     ALIAS = "alias"
 
+    PARENTHETICAL = "parenthetical"
+
     # Generic
     CASE = "case"
    CAST = "cast"
@@ -135,6 +137,8 @@ class FunctionType(Enum):
    ATTR_ACCESS = "attr_access"
    STRUCT = "struct"
    ARRAY = "array"
+    DATE_LITERAL = "date_literal"
+    DATETIME_LITERAL = "datetime_literal"
 
    # TEXT AND MAYBE MORE
    SPLIT = "split"
@@ -260,6 +264,13 @@ class ComparisonOperator(Enum):
    CONTAINS = "contains"
    ELSE = "else"
 
+    def __eq__(self, other):
+        if isinstance(other, str):
+            return self.value == other
+        if not isinstance(other, ComparisonOperator):
+            return False
+        return self.value == other.value
+
    @classmethod
    def _missing_(cls, value):
        if not isinstance(value, list) and " " in str(value):
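
The ComparisonOperator change above makes operator members compare equal to their raw string values. A self-contained sketch of that behavior (trimmed to two members; the __hash__ restoration is only needed to keep this standalone enum hashable and is not part of the diff):

    from enum import Enum

    class ComparisonOperator(Enum):
        # trimmed to two members for illustration
        EQ = "="
        CONTAINS = "contains"

        def __eq__(self, other):
            # string comparison added in 0.0.2.50: ComparisonOperator.EQ == "=" is now True
            if isinstance(other, str):
                return self.value == other
            if not isinstance(other, ComparisonOperator):
                return False
            return self.value == other.value

        # Python drops the inherited __hash__ when __eq__ is overridden; restore it here
        __hash__ = Enum.__hash__

    assert ComparisonOperator.EQ == "="
    assert ComparisonOperator.CONTAINS != "="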
trilogy/core/functions.py
CHANGED
@@ -1,3 +1,4 @@
+from datetime import date, datetime
 from typing import Optional
 
 from trilogy.constants import MagicConstants
@@ -17,6 +18,8 @@ from trilogy.core.models import (
     arg_to_datatype,
 )
 
+GENERIC_ARGS = Concept | Function | str | int | float | date | datetime
+
 
 def create_function_derived_concept(
     name: str,
@@ -262,7 +265,7 @@ def get_attr_datatype(
     return arg.datatype
 
 
-def AttrAccess(args: list[
+def AttrAccess(args: list[GENERIC_ARGS]):
     return Function(
         operator=FunctionType.ATTR_ACCESS,
         arguments=args,
trilogy/core/models.py
CHANGED
@@ -5,6 +5,7 @@ import hashlib
 import os
 from abc import ABC
 from collections import UserDict, UserList, defaultdict
+from datetime import date, datetime
 from enum import Enum
 from functools import cached_property
 from pathlib import Path
@@ -1264,6 +1265,8 @@ class Function(Mergeable, Namespaced, SelectContext, BaseModel):
         int,
         float,
         str,
+        date,
+        datetime,
         MapWrapper[Any, Any],
         DataType,
         ListType,
@@ -3868,6 +3871,8 @@ class Comparison(
         float,
         list,
         bool,
+        datetime,
+        date,
         Function,
         Concept,
         "Conditional",
@@ -3884,6 +3889,8 @@ class Comparison(
         float,
         list,
         bool,
+        date,
+        datetime,
         Concept,
         Function,
         "Conditional",
@@ -5008,5 +5015,9 @@ def arg_to_datatype(arg) -> DataType | ListType | StructType | MapType | Numeric
         return ListType(type=wrapper.type)
     elif isinstance(arg, MapWrapper):
         return MapType(key_type=arg.key_type, value_type=arg.value_type)
+    elif isinstance(arg, datetime):
+        return DataType.DATETIME
+    elif isinstance(arg, date):
+        return DataType.DATE
    else:
        raise ValueError(f"Cannot parse arg datatype for arg of raw type {type(arg)}")
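
The arg_to_datatype addition maps Python date and datetime literals onto the DATE and DATETIME data types. A minimal illustrative sketch of that dispatch (the DataType enum here is a trimmed stand-in, not the library's full definition):

    from datetime import date, datetime
    from enum import Enum

    class DataType(Enum):
        # subset of the real enum, for illustration only
        INTEGER = "int"
        FLOAT = "float"
        STRING = "string"
        DATE = "date"
        DATETIME = "datetime"

    def arg_to_datatype(arg):
        if isinstance(arg, int):
            return DataType.INTEGER
        if isinstance(arg, float):
            return DataType.FLOAT
        if isinstance(arg, str):
            return DataType.STRING
        # datetime is a subclass of date, so check it first (the diff orders the checks the same way)
        if isinstance(arg, datetime):
            return DataType.DATETIME
        if isinstance(arg, date):
            return DataType.DATE
        raise ValueError(f"Cannot parse arg datatype for arg of raw type {type(arg)}")

    assert arg_to_datatype(datetime(2024, 1, 1, 12, 0)) is DataType.DATETIME
    assert arg_to_datatype(date(2024, 1, 1)) is DataType.DATE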
trilogy/core/processing/concept_strategies_v3.py
CHANGED

@@ -359,7 +359,6 @@ def generate_node(
         input_concepts=[],
         output_concepts=constant_targets,
         environment=environment,
-        g=g,
         parents=[],
         depth=depth + 1,
     )
@@ -906,7 +905,6 @@ def _search_concepts(
         input_concepts=non_virtual,
         output_concepts=non_virtual,
         environment=environment,
-        g=g,
         parents=stack,
         depth=depth,
     )
@@ -987,7 +985,6 @@ def source_query_concepts(
             x for x in root.output_concepts if x.address not in root.hidden_concepts
         ],
         environment=environment,
-        g=g,
         parents=[root],
         partial_concepts=root.partial_concepts,
     )
trilogy/core/processing/node_generators/common.py
CHANGED

@@ -130,7 +130,6 @@ def gen_property_enrichment_node(
         ),
         output_concepts=base_node.output_concepts + extra_properties,
         environment=environment,
-        g=g,
         parents=[
             base_node,
         ]
@@ -209,7 +208,6 @@ def gen_enrichment_node(
         input_concepts=unique(join_keys + extra_required + non_hidden, "address"),
         output_concepts=unique(join_keys + extra_required + non_hidden, "address"),
         environment=environment,
-        g=g,
         parents=[enrich_node, base_node],
         force_group=False,
         preexisting_conditions=conditions.conditional if conditions else None,
trilogy/core/processing/node_generators/filter_node.py
CHANGED

@@ -117,7 +117,6 @@ def gen_filter_node(
         input_concepts=row_parent.output_concepts,
         output_concepts=[concept] + row_parent.output_concepts,
         environment=row_parent.environment,
-        g=row_parent.g,
         parents=[row_parent],
         depth=row_parent.depth,
         partial_concepts=row_parent.partial_concepts,
@@ -161,7 +160,6 @@ def gen_filter_node(
         ),
         output_concepts=[concept, immediate_parent] + parent_row_concepts,
         environment=environment,
-        g=g,
         parents=core_parents,
         grain=Grain(
             components=[immediate_parent] + parent_row_concepts,
@@ -202,7 +200,6 @@ def gen_filter_node(
         ]
         + local_optional,
         environment=environment,
-        g=g,
         parents=[
             # this node fetches only what we need to filter
             filter_node,
trilogy/core/processing/node_generators/group_to_node.py
CHANGED

@@ -45,7 +45,6 @@ def gen_group_to_node(
         output_concepts=parent_concepts + [concept],
         input_concepts=parent_concepts,
         environment=environment,
-        g=g,
         parents=parents,
         depth=depth,
     )
@@ -76,7 +75,6 @@ def gen_group_to_node(
         + [x for x in parent_concepts if x.address != concept.address],
         output_concepts=[concept] + local_optional,
         environment=environment,
-        g=g,
         parents=[
             # this node gets the group
             group_node,
trilogy/core/processing/node_generators/multiselect_node.py
CHANGED

@@ -108,7 +108,6 @@ def gen_multiselect_node(
         input_concepts=[x for y in base_parents for x in y.output_concepts],
         output_concepts=[x for y in base_parents for x in y.output_concepts],
         environment=environment,
-        g=g,
         depth=depth,
         parents=base_parents,
         node_joins=node_joins,
@@ -178,7 +177,6 @@ def gen_multiselect_node(
         input_concepts=enrich_node.output_concepts + node.output_concepts,
         output_concepts=node.output_concepts + local_optional,
         environment=environment,
-        g=g,
         depth=depth,
         parents=[
             # this node gets the multiselect
trilogy/core/processing/node_generators/select_merge_node.py
CHANGED

@@ -13,6 +13,9 @@ from trilogy.core.models import (
     LooseConceptList,
     WhereClause,
 )
+from trilogy.core.processing.node_generators.select_helpers.datasource_injection import (
+    get_union_sources,
+)
 from trilogy.core.processing.nodes import (
     ConstantNode,
     GroupNode,
@@ -35,38 +38,66 @@ def extract_address(node: str):
 def get_graph_partial_nodes(
     g: nx.DiGraph, conditions: WhereClause | None
 ) -> dict[str, list[str]]:
-    datasources: dict[str, Datasource] = nx.get_node_attributes(
+    datasources: dict[str, Datasource | list[Datasource]] = nx.get_node_attributes(
+        g, "datasource"
+    )
     partial: dict[str, list[str]] = {}
     for node in g.nodes:
         if node in datasources:
             ds = datasources[node]
-
-
+            if not isinstance(ds, list):
+                if ds.non_partial_for and conditions == ds.non_partial_for:
+                    partial[node] = []
+                    continue
+                partial[node] = [concept_to_node(c) for c in ds.partial_concepts]
+                ds = [ds]
+            # assume union sources have no partial
+            else:
                 partial[node] = []
 
     return partial
 
 
 def get_graph_grain_length(g: nx.DiGraph) -> dict[str, int]:
-    datasources: dict[str, Datasource] = nx.get_node_attributes(
-
+    datasources: dict[str, Datasource | list[Datasource]] = nx.get_node_attributes(
+        g, "datasource"
+    )
+    grain_length: dict[str, int] = {}
     for node in g.nodes:
         if node in datasources:
-
-
+            lookup = datasources[node]
+            if not isinstance(lookup, list):
+                lookup = [lookup]
+            assert isinstance(lookup, list)
+            grain_length[node] = sum(len(x.grain.components) for x in lookup)
+    return grain_length
 
 
 def create_pruned_concept_graph(
     g: nx.DiGraph,
     all_concepts: List[Concept],
+    datasources: list[Datasource],
     accept_partial: bool = False,
     conditions: WhereClause | None = None,
 ) -> nx.DiGraph:
     orig_g = g
     g = g.copy()
+
+    union_options = get_union_sources(datasources, all_concepts)
+    for ds_list in union_options:
+        node_address = "ds~" + "-".join([x.name for x in ds_list])
+        common: set[Concept] = set.intersection(
+            *[set(x.output_concepts) for x in ds_list]
+        )
+        g.add_node(node_address, datasource=ds_list)
+        for c in common:
+            g.add_edge(node_address, concept_to_node(c))
+
     target_addresses = set([c.address for c in all_concepts])
     concepts: dict[str, Concept] = nx.get_node_attributes(orig_g, "concept")
-
+    datasource_map: dict[str, Datasource | list[Datasource]] = nx.get_node_attributes(
+        orig_g, "datasource"
+    )
     relevant_concepts_pre = {
         n: x.address
         for n in g.nodes()
@@ -81,13 +112,13 @@ def create_pruned_concept_graph(
     to_remove = []
     for edge in g.edges:
         if (
-            edge[0] in
+            edge[0] in datasource_map
            and (pnodes := partial.get(edge[0], []))
            and edge[1] in pnodes
        ):
            to_remove.append(edge)
        if (
-            edge[1] in
+            edge[1] in datasource_map
            and (pnodes := partial.get(edge[1], []))
            and edge[0] in pnodes
        ):
@@ -136,7 +167,9 @@ def create_pruned_concept_graph(
     for edge in orig_g.edges():
         if edge[0] in relevant and edge[1] in relevant:
             g.add_edge(edge[0], edge[1])
-
+    # if we have no ds nodes at all, for non constant, we can't find it
+    if not any([n.startswith("ds~") for n in g.nodes]):
+        return None
     return g
 
 
@@ -190,6 +223,54 @@ def resolve_subgraphs(
     return pruned_subgraphs
 
 
+def create_datasource_node(
+    datasource: Datasource,
+    all_concepts: List[Concept],
+    accept_partial: bool,
+    environment: Environment,
+    depth: int,
+    conditions: WhereClause | None = None,
+) -> tuple[StrategyNode, bool]:
+    target_grain = Grain(components=all_concepts)
+    force_group = False
+    if not datasource.grain.issubset(target_grain):
+        force_group = True
+    partial_concepts = [
+        c.concept
+        for c in datasource.columns
+        if not c.is_complete and c.concept.address in all_concepts
+    ]
+    partial_lcl = LooseConceptList(concepts=partial_concepts)
+    nullable_concepts = [
+        c.concept
+        for c in datasource.columns
+        if c.is_nullable and c.concept.address in all_concepts
+    ]
+    nullable_lcl = LooseConceptList(concepts=nullable_concepts)
+    partial_is_full = conditions and (conditions == datasource.non_partial_for)
+    return (
+        SelectNode(
+            input_concepts=[c.concept for c in datasource.columns],
+            output_concepts=all_concepts,
+            environment=environment,
+            parents=[],
+            depth=depth,
+            partial_concepts=(
+                [] if partial_is_full else [c for c in all_concepts if c in partial_lcl]
+            ),
+            nullable_concepts=[c for c in all_concepts if c in nullable_lcl],
+            accept_partial=accept_partial,
+            datasource=datasource,
+            grain=Grain(components=all_concepts),
+            conditions=datasource.where.conditional if datasource.where else None,
+            preexisting_conditions=(
+                conditions.conditional if partial_is_full and conditions else None
+            ),
+        ),
+        force_group,
+    )
+
+
 def create_select_node(
     ds_name: str,
     subgraph: list[str],
@@ -199,12 +280,11 @@ def create_select_node(
     depth: int,
     conditions: WhereClause | None = None,
 ) -> StrategyNode:
-
+
     all_concepts = [
         environment.concepts[extract_address(c)] for c in subgraph if c.startswith("c~")
     ]
 
-    all_lcl = LooseConceptList(concepts=all_concepts)
     if all([c.derivation == PurposeLineage.CONSTANT for c in all_concepts]):
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} All concepts {[x.address for x in all_concepts]} are constants, returning constant node"
@@ -213,7 +293,6 @@ def create_select_node(
             output_concepts=all_concepts,
             input_concepts=[],
             environment=environment,
-            g=g,
             parents=[],
             depth=depth,
             # no partial for constants
@@ -221,41 +300,44 @@ def create_select_node(
             force_group=False,
         )
 
-    datasource =
-
-
-    if
-    force_group =
-    [30 additional removed lines are not legible in the extracted diff view]
+    datasource: dict[str, Datasource | list[Datasource]] = nx.get_node_attributes(
+        g, "datasource"
+    )[ds_name]
+    if isinstance(datasource, Datasource):
+        bcandidate, force_group = create_datasource_node(
+            datasource,
+            all_concepts,
+            accept_partial,
+            environment,
+            depth,
+            conditions=conditions,
+        )
+
+    elif isinstance(datasource, list):
+        from trilogy.core.processing.nodes.union_node import UnionNode
+
+        force_group = False
+        parents = []
+        for x in datasource:
+            subnode, fg = create_datasource_node(
+                x,
+                all_concepts,
+                accept_partial,
+                environment,
+                depth,
+                conditions=conditions,
+            )
+            parents.append(subnode)
+            force_group = force_group or fg
+        bcandidate = UnionNode(
+            output_concepts=all_concepts,
+            input_concepts=all_concepts,
+            environment=environment,
+            parents=parents,
+            depth=depth,
+        )
+    else:
+        raise ValueError(f"Unknown datasource type {datasource}")
 
     # we need to nest the group node one further
     if force_group is True:
@@ -263,14 +345,11 @@ def create_select_node(
             output_concepts=all_concepts,
             input_concepts=all_concepts,
             environment=environment,
-            g=g,
             parents=[bcandidate],
             depth=depth,
             partial_concepts=bcandidate.partial_concepts,
             nullable_concepts=bcandidate.nullable_concepts,
-            preexisting_conditions=
-                conditions.conditional if partial_is_full and conditions else None
-            ),
+            preexisting_conditions=bcandidate.preexisting_conditions,
         )
     else:
         candidate = bcandidate
@@ -292,7 +371,6 @@ def gen_select_merge_node(
         output_concepts=constants,
         input_concepts=[],
         environment=environment,
-        g=g,
         parents=[],
         depth=depth,
         partial_concepts=[],
@@ -300,7 +378,11 @@ def gen_select_merge_node(
     )
     for attempt in [False, True]:
         pruned_concept_graph = create_pruned_concept_graph(
-            g,
+            g,
+            non_constant,
+            accept_partial=attempt,
+            conditions=conditions,
+            datasources=list(environment.datasources.values()),
         )
         if pruned_concept_graph:
             logger.info(
@@ -321,7 +403,7 @@
                 create_select_node(
                     k,
                     subgraph,
-                    g=
+                    g=pruned_concept_graph,
                     accept_partial=accept_partial,
                     environment=environment,
                     depth=depth,
@@ -338,7 +420,6 @@
         output_concepts=constants,
         input_concepts=[],
         environment=environment,
-        g=g,
         parents=[],
         depth=depth,
         partial_concepts=[],
@@ -361,7 +442,6 @@
         output_concepts=all_concepts,
         input_concepts=non_constant,
         environment=environment,
-        g=g,
         depth=depth,
         parents=parents,
         preexisting_conditions=preexisting_conditions,
@@ -372,7 +452,6 @@
         output_concepts=all_concepts,
         input_concepts=all_concepts,
         environment=environment,
-        g=g,
         parents=[base],
         depth=depth,
         preexisting_conditions=preexisting_conditions,
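
The new union handling above injects a synthetic "ds~"-prefixed graph node whose edges cover only the concepts shared by every member datasource, so the pruning pass can treat a union of tables as a single source. A small networkx sketch of that idea, using toy dictionaries as stand-ins for Datasource objects (the get_union_sources grouping is assumed, not reproduced):

    import networkx as nx

    # Toy stand-ins: each "datasource" exposes a name and a set of concept addresses.
    datasources = [
        {"name": "orders_2023", "concepts": {"order.id", "order.amount"}},
        {"name": "orders_2024", "concepts": {"order.id", "order.amount", "order.channel"}},
    ]

    g = nx.DiGraph()
    for ds in datasources:
        for concept in ds["concepts"]:
            g.add_edge("ds~" + ds["name"], "c~" + concept)

    # Union injection: one synthetic node keyed by the member names, connected only to
    # the concepts every member can provide (their intersection).
    union_name = "ds~" + "-".join(ds["name"] for ds in datasources)
    common = set.intersection(*[ds["concepts"] for ds in datasources])
    g.add_node(union_name, datasource=[ds["name"] for ds in datasources])
    for concept in common:
        g.add_edge(union_name, "c~" + concept)

    print(sorted(g.successors(union_name)))  # ['c~order.amount', 'c~order.id']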
trilogy/core/processing/node_generators/unnest_node.py
CHANGED

@@ -46,7 +46,6 @@ def gen_unnest_node(
         input_concepts=arguments + non_equivalent_optional,
         output_concepts=[concept] + local_optional,
         environment=environment,
-        g=g,
         parents=([parent] if (arguments or local_optional) else []),
     )
     # we need to sometimes nest an unnest node,
@@ -56,7 +55,6 @@ def gen_unnest_node(
         input_concepts=base.output_concepts,
         output_concepts=base.output_concepts,
         environment=environment,
-        g=g,
         parents=[base],
         preexisting_conditions=conditions.conditional if conditions else None,
     )
trilogy/core/processing/node_generators/window_node.py
CHANGED

@@ -86,7 +86,6 @@ def gen_window_node(
         input_concepts=parent_concepts + targets + non_equivalent_optional,
         output_concepts=[concept] + parent_concepts + local_optional,
         environment=environment,
-        g=g,
         parents=[
             parent_node,
         ],
@@ -98,7 +97,6 @@ def gen_window_node(
         input_concepts=[concept] + local_optional,
         output_concepts=[concept] + local_optional,
         environment=environment,
-        g=g,
         parents=[_window_node],
         preexisting_conditions=conditions.conditional if conditions else None,
     )
trilogy/core/processing/nodes/base_node.py
CHANGED

@@ -156,7 +156,6 @@ class StrategyNode:
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment: Environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         partial_concepts: List[Concept] | None = None,
@@ -178,7 +177,6 @@ class StrategyNode:
         self.output_lcl = LooseConceptList(concepts=self.output_concepts)
 
         self.environment = environment
-        self.g = g
         self.whole_grain = whole_grain
         self.parents = parents or []
         self.resolution_cache: Optional[QueryDatasource] = None
@@ -399,7 +397,6 @@ class StrategyNode:
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=list(self.parents),
         partial_concepts=list(self.partial_concepts),
trilogy/core/processing/nodes/filter_node.py
CHANGED

@@ -27,7 +27,6 @@ class FilterNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         depth: int = 0,
@@ -41,7 +40,6 @@ class FilterNode(StrategyNode):
         super().__init__(
             output_concepts=output_concepts,
             environment=environment,
-            g=g,
             whole_grain=whole_grain,
             parents=parents,
             depth=depth,
@@ -59,7 +57,6 @@ class FilterNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/nodes/group_node.py
CHANGED

@@ -32,7 +32,6 @@ class GroupNode(StrategyNode):
         output_concepts: List[Concept],
         input_concepts: List[Concept],
         environment: Environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         depth: int = 0,
@@ -48,7 +47,6 @@ class GroupNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -178,7 +176,6 @@ class GroupNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/nodes/merge_node.py
CHANGED

@@ -103,7 +103,6 @@ class MergeNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         node_joins: List[NodeJoin] | None = None,
@@ -124,7 +123,6 @@ class MergeNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -364,7 +362,6 @@ class MergeNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/nodes/select_node_v2.py
CHANGED

@@ -34,7 +34,6 @@ class SelectNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment: Environment,
-        g,
         datasource: Datasource | None = None,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
@@ -52,7 +51,6 @@ class SelectNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -197,7 +195,6 @@ class SelectNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         datasource=self.datasource,
         depth=self.depth,
         parents=self.parents,
@@ -221,7 +218,6 @@ class ConstantNode(SelectNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         datasource=self.datasource,
         depth=self.depth,
         partial_concepts=list(self.partial_concepts),
trilogy/core/processing/nodes/union_node.py
CHANGED

@@ -18,7 +18,6 @@ class UnionNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         depth: int = 0,
@@ -27,7 +26,6 @@ class UnionNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -43,7 +41,6 @@ class UnionNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/nodes/unnest_node.py
CHANGED

@@ -23,7 +23,6 @@ class UnnestNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         depth: int = 0,
@@ -32,7 +31,6 @@ class UnnestNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -62,7 +60,6 @@ class UnnestNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/nodes/window_node.py
CHANGED

@@ -12,7 +12,6 @@ class WindowNode(StrategyNode):
         input_concepts: List[Concept],
         output_concepts: List[Concept],
         environment,
-        g,
         whole_grain: bool = False,
         parents: List["StrategyNode"] | None = None,
         depth: int = 0,
@@ -21,7 +20,6 @@ class WindowNode(StrategyNode):
         input_concepts=input_concepts,
         output_concepts=output_concepts,
         environment=environment,
-        g=g,
         whole_grain=whole_grain,
         parents=parents,
         depth=depth,
@@ -36,7 +34,6 @@ class WindowNode(StrategyNode):
         input_concepts=list(self.input_concepts),
         output_concepts=list(self.output_concepts),
         environment=self.environment,
-        g=self.g,
         whole_grain=self.whole_grain,
         parents=self.parents,
         depth=self.depth,
trilogy/core/processing/utility.py
CHANGED

@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from datetime import date, datetime
 from enum import Enum
 from logging import Logger
 from typing import Any, Dict, List, Set, Tuple
@@ -380,6 +381,8 @@ def is_scalar_condition(
     int
     | str
     | float
+    | date
+    | datetime
     | list[Any]
     | WindowItem
     | FilterItem
trilogy/core/query_processor.py
CHANGED
trilogy/dialect/base.py
CHANGED
@@ -1,3 +1,4 @@
+from datetime import date, datetime
 from typing import Any, Callable, Dict, List, Optional, Sequence, Union
 
 from jinja2 import Template
@@ -102,13 +103,15 @@ WINDOW_FUNCTION_MAP = {
     WindowType.AVG: window_factory("avg", include_concept=True),
 }
 
-DATATYPE_MAP = {
+DATATYPE_MAP: dict[DataType, str] = {
     DataType.STRING: "string",
     DataType.INTEGER: "int",
     DataType.FLOAT: "float",
     DataType.BOOL: "bool",
     DataType.NUMERIC: "numeric",
     DataType.MAP: "map",
+    DataType.DATE: "date",
+    DataType.DATETIME: "datetime",
 }
 
 
@@ -131,6 +134,7 @@ FUNCTION_MAP = {
     FunctionType.SPLIT: lambda x: f"split({x[0]}, {x[1]})",
     FunctionType.IS_NULL: lambda x: f"isnull({x[0]})",
     FunctionType.BOOL: lambda x: f"CASE WHEN {x[0]} THEN TRUE ELSE FALSE END",
+    FunctionType.PARENTHETICAL: lambda x: f"({x[0]})",
     # Complex
     FunctionType.INDEX_ACCESS: lambda x: f"{x[0]}[{x[1]}]",
     FunctionType.MAP_ACCESS: lambda x: f"{x[0]}[{x[1]}][1]",
@@ -138,6 +142,8 @@ FUNCTION_MAP = {
     FunctionType.ATTR_ACCESS: lambda x: f"""{x[0]}.{x[1].replace("'", "")}""",
     FunctionType.STRUCT: lambda x: f"{{{', '.join(struct_arg(x))}}}",
     FunctionType.ARRAY: lambda x: f"[{', '.join(x)}]",
+    FunctionType.DATE_LITERAL: lambda x: f"date '{x}'",
+    FunctionType.DATETIME_LITERAL: lambda x: f"datetime '{x}'",
     # math
     FunctionType.ADD: lambda x: " + ".join(x),
     FunctionType.SUBTRACT: lambda x: " - ".join(x),
@@ -454,6 +460,8 @@ class BaseDialect:
             list,
             bool,
             float,
+            date,
+            datetime,
             DataType,
             Function,
             Parenthetical,
@@ -612,7 +620,7 @@ class BaseDialect:
         elif isinstance(e, list):
             return f"{self.FUNCTION_MAP[FunctionType.ARRAY]([self.render_expr(x, cte=cte, cte_map=cte_map, raise_invalid=raise_invalid) for x in e])}"
         elif isinstance(e, DataType):
-            return
+            return self.DATATYPE_MAP.get(e, e.value)
         elif isinstance(e, DatePart):
             return str(e.value)
         elif isinstance(e, NumericType):
@@ -620,6 +628,10 @@ class BaseDialect:
         elif isinstance(e, MagicConstants):
             if e == MagicConstants.NULL:
                 return "null"
+        elif isinstance(e, date):
+            return self.FUNCTION_MAP[FunctionType.DATE_LITERAL](e)
+        elif isinstance(e, datetime):
+            return self.FUNCTION_MAP[FunctionType.DATETIME_LITERAL](e)
         else:
             raise ValueError(f"Unable to render type {type(e)} {e}")
 
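
With the DATE_LITERAL and DATETIME_LITERAL entries, date and datetime values render as SQL literals such as date '2024-01-15'. An illustrative, self-contained sketch of that rendering path (the mini renderer below is not the dialect class itself):

    from datetime import date, datetime

    # Format lambdas mirroring the new DATE_LITERAL / DATETIME_LITERAL entries.
    FUNCTION_MAP = {
        "date_literal": lambda x: f"date '{x}'",
        "datetime_literal": lambda x: f"datetime '{x}'",
    }

    def render_literal(value):
        # datetime must be checked before date here, since datetime subclasses date
        if isinstance(value, datetime):
            return FUNCTION_MAP["datetime_literal"](value.isoformat(sep=" "))
        if isinstance(value, date):
            return FUNCTION_MAP["date_literal"](value.isoformat())
        return repr(value)

    print(render_literal(date(2024, 1, 15)))             # date '2024-01-15'
    print(render_literal(datetime(2024, 1, 15, 8, 30)))  # datetime '2024-01-15 08:30:00'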
trilogy/dialect/duckdb.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Any, Callable, Mapping
 from jinja2 import Template
 
 from trilogy.core.enums import FunctionType, UnnestMode, WindowType
+from trilogy.core.models import DataType
 from trilogy.dialect.base import BaseDialect
 
 WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
@@ -30,6 +31,8 @@ FUNCTION_MAP = {
     FunctionType.DATE_PART: lambda x: f"date_part('{x[1]}', {x[0]})",
     FunctionType.DATE_DIFF: lambda x: f"date_diff('{x[2]}', {x[0]}, {x[1]})",
     FunctionType.CONCAT: lambda x: f"({' || '.join(x)})",
+    FunctionType.DATE_LITERAL: lambda x: f"date '{x}'",
+    FunctionType.DATETIME_LITERAL: lambda x: f"datetime '{x}'",
 }
 
 # if an aggregate function is called on a source that is at the same grain as the aggregate
@@ -44,6 +47,9 @@ FUNCTION_GRAIN_MATCH_MAP = {
     FunctionType.MIN: lambda args: f"{args[0]}",
 }
 
+DATATYPE_MAP: dict[DataType, str] = {}
+
+
 DUCKDB_TEMPLATE = Template(
     """{%- if output %}
 CREATE OR REPLACE TABLE {{ output.address.location }} AS
@@ -84,6 +90,7 @@ class DuckDBDialect(BaseDialect):
         **BaseDialect.FUNCTION_GRAIN_MATCH_MAP,
         **FUNCTION_GRAIN_MATCH_MAP,
     }
+    DATATYPE_MAP = {**BaseDialect.DATATYPE_MAP, **DATATYPE_MAP}
     QUOTE_CHARACTER = '"'
     SQL_TEMPLATE = DUCKDB_TEMPLATE
     UNNEST_MODE = UnnestMode.DIRECT
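
The DuckDB dialect picks up the new literals and reuses the class-level dict-merge pattern for DATATYPE_MAP, so a subclass only lists the entries it wants to override. A generic sketch of that pattern with illustrative class names:

    class BaseDialect:
        # base mapping shared by every dialect
        DATATYPE_MAP = {"int": "int", "string": "string", "datetime": "datetime"}

    class DuckDBDialect(BaseDialect):
        # module-level overrides would go in the second dict; empty means "inherit everything"
        DATATYPE_MAP = {**BaseDialect.DATATYPE_MAP, **{}}

    class BigQueryishDialect(BaseDialect):
        # a hypothetical dialect that renders datetime differently only overrides one key
        DATATYPE_MAP = {**BaseDialect.DATATYPE_MAP, "datetime": "timestamp"}

    print(DuckDBDialect.DATATYPE_MAP["datetime"])       # datetime
    print(BigQueryishDialect.DATATYPE_MAP["datetime"])  # timestamp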
trilogy/hooks/graph_hook.py
CHANGED
@@ -1,7 +1,19 @@
+import sys
+from os import environ
+
 import networkx as nx
 
 from trilogy.hooks.base_hook import BaseHook
 
+if not environ.get("TCL_LIBRARY"):
+    minor = sys.version_info.minor
+    if minor == 13:
+        environ["TCL_LIBRARY"] = r"C:\Program Files\Python313\tcl\tcl8.6"
+    elif minor == 12:
+        environ["TCL_LIBRARY"] = r"C:\Program Files\Python312\tcl\tcl8.6"
+    else:
+        pass
+
 
 class GraphHook(BaseHook):
     def __init__(self):
@@ -11,6 +23,8 @@ class GraphHook(BaseHook):
         except ImportError:
             raise ImportError("GraphHook requires matplotlib and scipy to be installed")
 
+    # https://github.com/python/cpython/issues/125235#issuecomment-2412948604
+
     def query_graph_built(
         self,
         graph: nx.DiGraph,
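
The TCL_LIBRARY block is a workaround for the CPython Tk path issue linked in the diff and assumes default Windows install locations. A hedged sketch of applying the same guard only when the candidate path actually exists:

    import os
    import sys

    def ensure_tcl_library() -> None:
        """Set TCL_LIBRARY only when it is missing, mirroring the hook's guard."""
        if os.environ.get("TCL_LIBRARY"):
            return  # respect an explicit user setting
        minor = sys.version_info.minor
        candidate = rf"C:\Program Files\Python3{minor}\tcl\tcl8.6"
        if os.path.isdir(candidate):  # only apply when the default install path exists
            os.environ["TCL_LIBRARY"] = candidate

    ensure_tcl_library()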
trilogy/parsing/common.py
CHANGED
@@ -1,3 +1,4 @@
+from datetime import date, datetime
 from typing import List, Tuple
 
 from trilogy.constants import (
@@ -37,14 +38,22 @@ def process_function_args(
     args,
     meta: Meta | None,
     environment: Environment,
-):
-    final: List[Concept | Function] = []
+) -> List[Concept | Function | str | int | float | date | datetime]:
+    final: List[Concept | Function | str | int | float | date | datetime] = []
     for arg in args:
         # if a function has an anonymous function argument
         # create an implicit concept
-
-
-
+        if isinstance(arg, Parenthetical):
+            processed = process_function_args([arg.content], meta, environment)
+            final.append(
+                Function(
+                    operator=FunctionType.PARENTHETICAL,
+                    arguments=processed,
+                    output_datatype=arg_to_datatype(processed[0]),
+                    output_purpose=function_args_to_output_purpose(processed),
+                )
+            )
+        elif isinstance(arg, Function):
             # if it's not an aggregate function, we can skip the virtual concepts
             # to simplify anonymous function handling
             if (
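
Wrapping a Parenthetical argument in an explicit PARENTHETICAL function preserves grouping through later SQL rendering. A toy sketch of why the explicit node matters (tuples stand in for the real expression objects):

    # Without an explicit node, (a + b) * c and a + b * c would render identically.
    def render(node):
        if isinstance(node, tuple):
            op, args = node
            if op == "parenthetical":
                return f"({render(args[0])})"
            return f" {op} ".join(render(a) for a in args)
        return str(node)

    expr = ("*", [("parenthetical", [("+", ["a", "b"])]), "c"])
    print(render(expr))  # (a + b) * c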
trilogy/parsing/parse_engine.py
CHANGED
@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from datetime import date, datetime
 from os.path import dirname, join
 from pathlib import Path
 from re import IGNORECASE
@@ -669,6 +670,12 @@ class ParseToObjects(Transformer):
         )
         for column in columns:
             column.concept = column.concept.with_grain(datasource.grain)
+        if datasource.where:
+            for x in datasource.where.concept_arguments:
+                if x.address not in datasource.output_concepts:
+                    raise ValueError(
+                        f"Datasource {name} where condition depends on concept {x.address} that does not exist on the datasource, line {meta.line}."
+                    )
         self.environment.add_datasource(datasource, meta=meta)
         return datasource
 
@@ -1783,6 +1790,30 @@ class ParseToObjects(Transformer):
     @v_args(meta=True)
     def fcast(self, meta, args) -> Function:
         args = process_function_args(args, meta=meta, environment=self.environment)
+        if isinstance(args[0], str):
+            processed: date | datetime | int | float | bool | str
+            if args[1] == DataType.DATE:
+                processed = date.fromisoformat(args[0])
+            elif args[1] == DataType.DATETIME:
+                processed = datetime.fromisoformat(args[0])
+            elif args[1] == DataType.TIMESTAMP:
+                processed = datetime.fromisoformat(args[0])
+            elif args[1] == DataType.INTEGER:
+                processed = int(args[0])
+            elif args[1] == DataType.FLOAT:
+                processed = float(args[0])
+            elif args[1] == DataType.BOOL:
+                processed = args[0].capitalize() == "True"
+            elif args[1] == DataType.STRING:
+                processed = args[0]
+            else:
+                raise SyntaxError(f"Invalid cast type {args[1]}")
+            return Function(
+                operator=FunctionType.CONSTANT,
+                output_datatype=args[1],
+                output_purpose=Purpose.CONSTANT,
+                arguments=[processed],
+            )
         output_datatype = args[1]
         return Function(
             operator=FunctionType.CAST,
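
The new fcast branch folds a cast of a string literal into a typed constant at parse time instead of emitting a runtime CAST. An illustrative, standalone version of that folding logic (type names are simplified strings here):

    from datetime import date, datetime

    def fold_cast_literal(raw: str, target: str):
        # mirrors the new fcast branch: a string literal cast to a known type becomes
        # a typed Python constant at parse time rather than a runtime CAST
        if target == "date":
            return date.fromisoformat(raw)
        if target in ("datetime", "timestamp"):
            return datetime.fromisoformat(raw)
        if target == "int":
            return int(raw)
        if target == "float":
            return float(raw)
        if target == "bool":
            return raw.capitalize() == "True"
        if target == "string":
            return raw
        raise SyntaxError(f"Invalid cast type {target}")

    assert fold_cast_literal("2024-01-15", "date") == date(2024, 1, 15)
    assert fold_cast_literal("true", "bool") is True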
trilogy/parsing/trilogy.lark
CHANGED
@@ -193,7 +193,9 @@
 _math_functions: fmul | fdiv | fadd | fsub | fround | fmod | fabs
 
 //generic
-
+_fcast_primary: "cast"i "(" expr "as"i data_type ")"
+_fcast_alt: expr "::" data_type
+fcast: _fcast_primary | _fcast_alt
 concat: ("concat"i "(" (expr ",")* expr ")") | (expr "||" expr)
 fcoalesce: "coalesce"i "(" (expr ",")* expr ")"
 fcase_when: "WHEN"i conditional "THEN"i expr
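
The grammar change adds a second cast spelling, expr :: data_type, alongside cast(expr as data_type). A minimal runnable Lark sketch of the two spellings (requires the lark package; this is not the full trilogy grammar):

    from lark import Lark

    GRAMMAR = r"""
        fcast: _fcast_primary | _fcast_alt
        _fcast_primary: "cast"i "(" NAME "as"i NAME ")"
        _fcast_alt: NAME "::" NAME
        %import common.CNAME -> NAME
        %import common.WS
        %ignore WS
    """

    parser = Lark(GRAMMAR, start="fcast")
    print(parser.parse("cast(x as int)").pretty())
    print(parser.parse("x::int").pretty())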
{pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/LICENSE.md: File without changes
{pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/WHEEL: File without changes
{pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/entry_points.txt: File without changes
{pytrilogy-0.0.2.49.dist-info → pytrilogy-0.0.2.50.dist-info}/top_level.txt: File without changes