pytrilogy 0.0.3.15__tar.gz → 0.0.3.17__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of pytrilogy might be problematic.
- {pytrilogy-0.0.3.15/pytrilogy.egg-info → pytrilogy-0.0.3.17}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17/pytrilogy.egg-info}/PKG-INFO +1 -1
- pytrilogy-0.0.3.17/tests/test_typing.py +145 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/authoring/__init__.py +3 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/functions.py +13 -12
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/author.py +30 -19
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/core.py +23 -3
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/concept_strategies_v3.py +1 -1
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/select_merge_node.py +3 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/author.py +0 -2
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/base.py +1 -1
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/common.py +4 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/parse_engine.py +70 -2
- pytrilogy-0.0.3.15/tests/test_typing.py +0 -42
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/LICENSE.md +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/README.md +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pyproject.toml +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/setup.cfg +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/setup.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_metadata.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_models.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_query_render.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_select.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_show.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_user_functions.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/constants.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/enums.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/build.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/build_environment.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/datasource.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/environment.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/execute.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/filter_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/union_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/base_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/group_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/merge_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/union_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/utility.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/query_processor.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/build.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/common.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/execute.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/dataframe.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/render.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/utility.py +0 -0
pytrilogy-0.0.3.17/tests/test_typing.py (added)
@@ -0,0 +1,145 @@
+from decimal import Decimal
+
+from trilogy import Dialects
+
+
+def test_typing():
+    env = Dialects.DUCK_DB.default_executor()
+    env.environment.parse(
+        """
+
+type email string;
+
+key customer_id int;
+property customer_id.email string::email;
+
+def is_valid_email(email) -> contains(email, '@');
+
+datasource customers (
+    id:customer_id,
+    email: email
+)
+grain (customer_id)
+query '''
+select 1 as id, 'bright@gmail.com' as email
+union all
+select 2 as id, 'funky@hotmail.com' as email
+''';
+
+
+"""
+    )
+
+    results = env.execute_query(
+        """SELECT
+    customer_id,
+    @is_valid_email(email)->valid;"""
+    )
+
+    for row in results.fetchall():
+        assert row.valid is True
+
+    assert "email" in env.environment.data_types
+
+    assert env.environment.concepts["email"].datatype.traits == ["email"]
+
+
+def test_typing_aggregate():
+    env = Dialects.DUCK_DB.default_executor()
+    env.environment.parse(
+        """
+
+type money float;
+
+key revenue float::money;
+key revenue_two float::money;
+key multiplier float;
+
+datasource orders (
+    revenue:revenue,
+    revenue_two:revenue_two,
+    multiplier:multiplier
+)
+grain (revenue)
+query '''
+select 5.0 as revenue, 3.3 as revenue_two, 2.0 as multiplier
+union all
+select 10.0 as revenue, 13.1 as revenue_two, 3.0 as multiplier
+''';
+
+
+"""
+    )
+
+    results = env.execute_query(
+        """SELECT
+    sum(revenue)->direct_total,
+    sum(revenue*multiplier)->total,
+    direct_total-total -> diff;"""
+    )
+
+    for row in results.fetchall():
+        assert row.total == Decimal("40.00")
+
+    assert "money" in env.environment.data_types
+    assert env.environment.concepts["direct_total"].datatype.traits == ["money"]
+    assert env.environment.concepts["total"].datatype.traits == ["money"]
+    assert env.environment.concepts["diff"].datatype.traits == [
+        "money"
+    ], env.environment.concepts["diff"].datatype
+
+    results = env.execute_query(
+        """SELECT
+    revenue+revenue_two->add_total,
+    revenue-revenue_two->sub_total
+    ;"""
+    )
+
+    assert "money" in env.environment.data_types
+    assert env.environment.concepts["add_total"].datatype.traits == ["money"]
+    assert env.environment.concepts["sub_total"].datatype.traits == ["money"]
+
+
+def test_custom_function_typing():
+    env = Dialects.DUCK_DB.default_executor()
+    env.environment.parse(
+        """
+
+type money float;
+
+key revenue float::money;
+
+def revenue_times_2(revenue) -> revenue*2;
+
+def revenue_times_multiplier(revenue, multiplier) -> revenue*multiplier;
+
+datasource orders (
+    revenue:revenue,
+
+)
+grain (revenue)
+query '''
+select 5.0 as revenue, 3.3 as revenue_two, 2.0 as multiplier
+union all
+select 10.0 as revenue, 13.1 as revenue_two, 3.0 as multiplier
+''';
+
+
+"""
+    )
+
+    _ = env.execute_query(
+        """
+with scaled as
+SELECT
+    @revenue_times_2(revenue)->revenue
+;
+
+SELECT
+    sum(
+        round( @revenue_times_multiplier(lag 1 @revenue_times_2(scaled.revenue), 2.0), 2)
+    )->total;
+"""
+    )
+
+    assert env.environment.concepts["total"].datatype.traits == ["money"]

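The new tests above document the trait-typing feature this release introduces: a `type <name> <base>` statement declares a named trait, `base::trait` attaches it to a concept, and the tests expect the trait to survive custom functions, arithmetic, and aggregation. A condensed, illustrative sketch of the same flow, using only calls that appear in the test file:

# Condensed from test_typing above; illustrative only.
from trilogy import Dialects

executor = Dialects.DUCK_DB.default_executor()
executor.environment.parse(
    """
type email string;

key customer_id int;
property customer_id.email string::email;

datasource customers (
    id:customer_id,
    email: email
)
grain (customer_id)
query '''
select 1 as id, 'bright@gmail.com' as email
''';
"""
)
# the trait is registered and carried on the concept's datatype
assert "email" in executor.environment.data_types
assert executor.environment.concepts["email"].datatype.traits == ["email"]
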
{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/authoring/__init__.py
@@ -40,6 +40,7 @@ from trilogy.core.models.core import (
     MapType,
     StructType,
 )
+from trilogy.core.models.datasource import Datasource, DatasourceMetadata
 from trilogy.core.models.environment import Environment
 from trilogy.core.statements.author import (
     ConceptDeclarationStatement,
@@ -100,4 +101,6 @@ __all__ = [
     "MultiSelectStatement",
     "PersistStatement",
     "RawSQLStatement",
+    "Datasource",
+    "DatasourceMetadata",
 ]

{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/functions.py
@@ -29,6 +29,7 @@ from trilogy.core.models.core import (
     MapType,
     NumericType,
     StructType,
+    TraitDataType,
     arg_to_datatype,
     merge_datatypes,
 )
@@ -94,6 +95,10 @@ def get_cast_output_type(
     return args[1]
 
 
+def get_output_type_at_index(args, index: int):
+    return arg_to_datatype(args[index])
+
+
 def validate_case_output(
     args: list[Any],
 ) -> DataType:
@@ -140,6 +145,9 @@ def get_date_trunc_output(
 
 
 FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
+    FunctionType.PARENTHETICAL: FunctionConfig(
+        arg_count=1,
+    ),
     FunctionType.UNNEST: FunctionConfig(
         valid_inputs={
             DataType.ARRAY,
@@ -173,7 +181,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.BOOL,
         },
         output_purpose=Purpose.METRIC,
-        output_type=DataType.INTEGER,
         arg_count=1,
     ),
     FunctionType.MIN: FunctionConfig(
@@ -186,7 +193,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.TIMESTAMP,
         },
         output_purpose=Purpose.METRIC,
-        output_type=DataType.INTEGER,
         arg_count=1,
     ),
     FunctionType.SPLIT: FunctionConfig(
@@ -237,7 +243,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
     FunctionType.ABS: FunctionConfig(
         valid_inputs={DataType.INTEGER, DataType.FLOAT, DataType.NUMBER},
         output_purpose=Purpose.PROPERTY,
-        output_type=DataType.INTEGER,
         arg_count=1,
     ),
     FunctionType.COALESCE: FunctionConfig(
@@ -519,7 +524,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.NUMERIC,
         },
         output_purpose=Purpose.PROPERTY,
-        output_type=DataType.INTEGER,
         arg_count=InfiniteFunctionArgs,
     ),
     FunctionType.SUBTRACT: FunctionConfig(
@@ -530,7 +534,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.NUMERIC,
         },
         output_purpose=Purpose.PROPERTY,
-        output_type=DataType.INTEGER,
         arg_count=InfiniteFunctionArgs,
     ),
     FunctionType.MULTIPLY: FunctionConfig(
@@ -541,7 +544,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.NUMERIC,
         },
         output_purpose=Purpose.PROPERTY,
-        output_type=DataType.INTEGER,
         arg_count=InfiniteFunctionArgs,
     ),
     FunctionType.DIVIDE: FunctionConfig(
@@ -552,7 +554,6 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             DataType.NUMERIC,
         },
         output_purpose=Purpose.PROPERTY,
-        output_type=DataType.INTEGER,
         arg_count=InfiniteFunctionArgs,
     ),
     FunctionType.MOD: FunctionConfig(
@@ -570,7 +571,7 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
             {DataType.INTEGER},
         ],
         output_purpose=Purpose.PROPERTY,
-
+        output_type_function=lambda args: get_output_type_at_index(args, 0),
         arg_count=2,
     ),
     FunctionType.CUSTOM: FunctionConfig(
@@ -621,13 +622,11 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
     FunctionType.SUM: FunctionConfig(
         valid_inputs={DataType.INTEGER, DataType.FLOAT, DataType.NUMBER},
         output_purpose=Purpose.METRIC,
-        output_type=DataType.INTEGER,
         arg_count=1,
     ),
     FunctionType.AVG: FunctionConfig(
         valid_inputs={DataType.INTEGER, DataType.FLOAT, DataType.NUMBER},
         output_purpose=Purpose.METRIC,
-        output_type=DataType.INTEGER,
         arg_count=1,
     ),
     FunctionType.UNIX_TO_TIMESTAMP: FunctionConfig(
@@ -641,7 +640,7 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
 EXCLUDED_FUNCTIONS = {
     FunctionType.CUSTOM,
     FunctionType.ALIAS,
-    FunctionType.PARENTHETICAL,
+    # FunctionType.PARENTHETICAL,
     # Temporary
     FunctionType.DATE_LITERAL,
     FunctionType.DATETIME_LITERAL,
@@ -686,8 +685,10 @@ class FunctionFactory:
         full_args = []
         final_output_type: CONCRETE_TYPES
         if config.output_type_function:
+
            final_output_type = config.output_type_function(full_args)
         elif not base_output_type:
+
             final_output_type = merge_datatypes([arg_to_datatype(x) for x in full_args])
         elif base_output_type:
             final_output_type = base_output_type
@@ -715,7 +716,7 @@ def create_function_derived_concept(
     operator: FunctionType,
     arguments: list[Concept],
     output_type: Optional[
-        DataType | ListType | StructType | MapType | NumericType
+        DataType | ListType | StructType | MapType | NumericType | TraitDataType
     ] = None,
     output_purpose: Optional[Purpose] = None,
 ) -> Concept:

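The registry edits above all push in one direction: hard-coded `output_type=DataType.INTEGER` defaults are dropped from SUM, AVG, ABS, and the add/subtract/multiply/divide entries, and MOD gains an `output_type_function`, so output types now flow from the arguments via the FunctionFactory fallback shown in the later hunks. A small self-contained mimic of that decision order (the names below are illustrative stand-ins, not the library's API):

from typing import Callable, Optional


def resolve_output_type(
    static_output_type: Optional[str],
    output_type_function: Optional[Callable[[list], str]],
    argument_types: list,
):
    # Mirrors the three branches in the FunctionFactory hunk above.
    if output_type_function:
        return output_type_function(argument_types)  # e.g. MOD: type of first argument
    if not static_output_type:
        return merge(argument_types)  # stand-in for merge_datatypes/arg_to_datatype
    return static_output_type


def merge(types: list):
    # Trivial stand-in: a single distinct type passes through unchanged.
    return types[0] if len(set(types)) == 1 else "unknown"


print(resolve_output_type(None, None, ["float::money"]))            # float::money (SUM-style entry)
print(resolve_output_type(None, lambda t: t[0], ["int", "float"]))  # int (MOD-style entry)
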
{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/author.py
@@ -1370,7 +1370,7 @@ class WindowItem(DataTyped, ConceptArgs, Mergeable, Namespaced, BaseModel):
 
 
 def get_basic_type(
-    type: DataType | ListType | StructType | MapType | NumericType,
+    type: DataType | ListType | StructType | MapType | NumericType | TraitDataType,
 ) -> DataType:
     if isinstance(type, ListType):
         return DataType.LIST
@@ -1380,6 +1380,8 @@ def get_basic_type(
         return DataType.MAP
     if isinstance(type, NumericType):
         return DataType.NUMERIC
+    if isinstance(type, TraitDataType):
+        return type.type
     return type
 
 
@@ -1531,7 +1533,9 @@ def get_concept_arguments(expr) -> List["ConceptRef"]:
 class Function(DataTyped, ConceptArgs, Mergeable, Namespaced, BaseModel):
     operator: FunctionType
     arg_count: int = Field(default=1)
-    output_datatype: DataType | ListType | StructType | MapType | NumericType
+    output_datatype: (
+        DataType | ListType | StructType | MapType | NumericType | TraitDataType
+    )
     output_purpose: Purpose
     valid_inputs: Optional[
         Union[
@@ -1629,23 +1633,30 @@ class Function(DataTyped, ConceptArgs, Mergeable, Namespaced, BaseModel):
         return v
 
     def with_reference_replacement(self, source: str, target: Expr):
+        from trilogy.core.functions import arg_to_datatype, merge_datatypes
+
+        nargs = [
+            (
+                c.with_reference_replacement(
+                    source,
+                    target,
+                )
+                if isinstance(
+                    c,
+                    Mergeable,
+                )
+                else c
+            )
+            for c in self.arguments
+        ]
+        if self.output_datatype == DataType.UNKNOWN:
+            new_output = merge_datatypes([arg_to_datatype(x) for x in nargs])
+        else:
+            new_output = self.output_datatype
         return Function.model_construct(
             operator=self.operator,
-            arguments=[
-                (
-                    c.with_reference_replacement(
-                        source,
-                        target,
-                    )
-                    if isinstance(
-                        c,
-                        Mergeable,
-                    )
-                    else c
-                )
-                for c in self.arguments
-            ],
-            output_datatype=self.output_datatype,
+            arguments=nargs,
+            output_datatype=new_output,
             output_purpose=self.output_purpose,
             valid_inputs=self.valid_inputs,
             arg_count=self.arg_count,
@@ -1777,7 +1788,7 @@ class AggregateWrapper(Mergeable, DataTyped, ConceptArgs, Namespaced, BaseModel)
     )
 
 
-class FilterItem(Namespaced, ConceptArgs, BaseModel):
+class FilterItem(DataTyped, Namespaced, ConceptArgs, BaseModel):
     content: ConceptRef
     where: "WhereClause"
 
@@ -1837,7 +1848,7 @@ class RowsetLineage(Namespaced, Mergeable, BaseModel):
     )
 
 
-class RowsetItem(Mergeable, ConceptArgs, Namespaced, BaseModel):
+class RowsetItem(Mergeable, DataTyped, ConceptArgs, Namespaced, BaseModel):
     content: ConceptRef
     rowset: RowsetLineage
 

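With the new `TraitDataType` branch in `get_basic_type`, a trait-wrapped type reduces to its underlying `DataType` wherever a plain base type is needed. A one-line sketch, assuming the function and classes are importable from the modules shown in these hunks:

from trilogy.core.models.author import get_basic_type
from trilogy.core.models.core import DataType, TraitDataType

# float::money is treated as FLOAT once unwrapped
assert get_basic_type(TraitDataType(type=DataType.FLOAT, traits=["money"])) == DataType.FLOAT
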
{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/models/core.py
@@ -51,7 +51,13 @@ class Addressable(ABC):
 
 
 TYPEDEF_TYPES = Union[
-    "DataType",
+    "DataType",
+    "MapType",
+    "ListType",
+    "NumericType",
+    "StructType",
+    "DataTyped",
+    "TraitDataType",
 ]
 
 CONCRETE_TYPES = Union[
@@ -60,6 +66,7 @@ CONCRETE_TYPES = Union[
     "ListType",
     "NumericType",
     "StructType",
+    "TraitDataType",
 ]
 
 KT = TypeVar("KT")
@@ -101,6 +108,16 @@ class TraitDataType(BaseModel):
     type: DataType
     traits: list[str]
 
+    def __hash__(self):
+        return hash(self.type)
+
+    def __eq__(self, other):
+        if isinstance(other, DataType):
+            return self.type == other
+        elif isinstance(other, TraitDataType):
+            return self.type == other.type and self.traits == other.traits
+        return False
+
     @property
     def data_type(self):
         return self.type
@@ -312,8 +329,10 @@ def dict_to_map_wrapper(arg):
 
 
 def merge_datatypes(
-    inputs: list[DataType | ListType | StructType | MapType | NumericType],
-) -> DataType | ListType | StructType | MapType | NumericType:
+    inputs: list[
+        DataType | ListType | StructType | MapType | NumericType | TraitDataType
+    ],
+) -> DataType | ListType | StructType | MapType | NumericType | TraitDataType:
     """This is a temporary hack for doing between
     allowable datatype transformation matrix"""
     if len(inputs) == 1:
@@ -348,6 +367,7 @@ def is_compatible_datatype(left, right):
 
 
 def arg_to_datatype(arg) -> CONCRETE_TYPES:
+
     if isinstance(arg, MagicConstants):
         if arg == MagicConstants.NULL:
             return DataType.NULL

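The new `__hash__` and `__eq__` make a trait-carrying type interchangeable with its bare base type in comparisons, while two `TraitDataType` values still compare by both base type and traits. A short sketch of those semantics, assuming the classes are importable from trilogy/core/models/core.py as shown above:

from trilogy.core.models.core import DataType, TraitDataType

money = TraitDataType(type=DataType.FLOAT, traits=["money"])

assert money == DataType.FLOAT                                        # equal to the bare base type
assert money == TraitDataType(type=DataType.FLOAT, traits=["money"])
assert money != TraitDataType(type=DataType.FLOAT, traits=["email"])  # traits must match too
assert hash(money) == hash(DataType.FLOAT)                            # hashes on the base type only
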
{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/concept_strategies_v3.py
@@ -395,6 +395,7 @@ def generate_node(
             environment=environment,
             parents=[],
             depth=depth + 1,
+            preexisting_conditions=conditions.conditional if conditions else None,
         )
     elif concept.derivation == Derivation.BASIC:
         # this is special case handling for group bys
@@ -786,7 +787,6 @@ def _search_concepts(
     )
     # if anything we need to get is in the filter set and it's a computed value
     # we need to get _everything_ in this loop
-    logger.info(f"{[x.address for x in conditions.row_arguments]}")
     if any(
         [
             x.derivation not in (Derivation.ROOT, Derivation.CONSTANT)

{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/processing/node_generators/select_merge_node.py
@@ -405,6 +405,7 @@ def create_select_node(
         # no partial for constants
         partial_concepts=[],
         force_group=False,
+        preexisting_conditions=conditions.conditional if conditions else None,
     )
 
     datasource: dict[str, BuildDatasource | list[BuildDatasource]] = (
@@ -491,6 +492,7 @@ def gen_select_merge_node(
             depth=depth,
             partial_concepts=[],
             force_group=False,
+            preexisting_conditions=conditions.conditional if conditions else None,
         )
     for attempt in [False, True]:
         pruned_concept_graph = create_pruned_concept_graph(
@@ -542,6 +544,7 @@ def gen_select_merge_node(
                 depth=depth,
                 partial_concepts=[],
                 force_group=False,
+                preexisting_conditions=conditions.conditional if conditions else None,
             )
         )
 

{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/core/statements/author.py
@@ -134,7 +134,6 @@ class SelectStatement(HasUUID, SelectTypeMixin, BaseModel):
         output.grain = output.calculate_grain(environment)
 
         for x in selection:
-
             if x.is_undefined and environment.concepts.fail_on_missing:
                 environment.concepts.raise_undefined(
                     x.concept.address, meta.line_number if meta else None
@@ -158,7 +157,6 @@ class SelectStatement(HasUUID, SelectTypeMixin, BaseModel):
                 output.local_concepts[x.content.address] = environment.concepts[
                     x.content.address
                 ]  # .set_select_grain(output.grain, environment)
-
         output.validate_syntax(environment)
         return output
 

{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/common.py
@@ -100,6 +100,8 @@ def process_function_arg(
         concept.metadata.line_number = meta.line
         environment.add_concept(concept, meta=meta)
         return concept
+    elif isinstance(arg, ConceptRef):
+        return environment.concepts[arg.address]
     return arg
 
 
@@ -192,6 +194,8 @@ def concept_is_relevant(
 ) -> bool:
     if isinstance(concept, UndefinedConcept):
 
+        return False
+    if concept.datatype == DataType.UNKNOWN:
         return False
     if isinstance(concept, ConceptRef):
         if environment:

{pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}/trilogy/parsing/parse_engine.py
@@ -4,7 +4,7 @@ from enum import Enum
 from os.path import dirname, join
 from pathlib import Path
 from re import IGNORECASE
-from typing import List, Optional, Tuple, Union
+from typing import Any, List, Optional, Tuple, Union
 
 from lark import Lark, ParseTree, Transformer, Tree, v_args
 from lark.exceptions import (
@@ -68,6 +68,7 @@ from trilogy.core.models.author import (
     OrderBy,
     OrderItem,
     Parenthetical,
+    RowsetItem,
     SubselectComparison,
     WhereClause,
     Window,
@@ -77,6 +78,7 @@ from trilogy.core.models.author import (
 )
 from trilogy.core.models.core import (
     DataType,
+    DataTyped,
     ListType,
     ListWrapper,
     MapType,
@@ -223,6 +225,52 @@ def unwrap_transformation(
     )
 
 
+def rehydrate_lineage(
+    lineage: Any, environment: Environment, function_factory: FunctionFactory
+) -> Any:
+    """Fix datatype propagation. This is a hack to fix the fact that we don't know the datatypes of functions until we've parsed all concepts"""
+    if isinstance(lineage, Function):
+        rehydrated = [
+            rehydrate_lineage(x, environment, function_factory)
+            for x in lineage.arguments
+        ]
+        return function_factory.create_function(
+            rehydrated,
+            operator=lineage.operator,
+        )
+    elif isinstance(lineage, Parenthetical):
+        lineage.content = rehydrate_lineage(
+            lineage.content, environment, function_factory
+        )
+        return lineage
+    elif isinstance(lineage, WindowItem):
+        lineage.content.datatype = environment.concepts[
+            lineage.content.address
+        ].datatype
+        return lineage
+    elif isinstance(lineage, AggregateWrapper):
+        lineage.function = rehydrate_lineage(
+            lineage.function, environment, function_factory
+        )
+        return lineage
+    elif isinstance(lineage, RowsetItem):
+        lineage.content.datatype = environment.concepts[
+            lineage.content.address
+        ].datatype
+        return lineage
+    else:
+        return lineage
+
+
+def rehydrate_concept_lineage(
+    concept: Concept, environment: Environment, function_factory: FunctionFactory
+) -> Concept:
+    concept.lineage = rehydrate_lineage(concept.lineage, environment, function_factory)
+    if isinstance(concept.lineage, DataTyped):
+        concept.datatype = concept.lineage.output_datatype
+    return concept
+
+
 class ParseToObjects(Transformer):
     def __init__(
         self,
@@ -275,6 +323,25 @@ class ParseToObjects(Transformer):
             v.run_second_parse_pass()
         reparsed = self.transform(self.tokens[self.token_address])
         self.environment.concepts.undefined = {}
+        passed = False
+        passes = 0
+        # output datatypes for functions may have been wrong
+        # as they were derived from not fully understood upstream types
+        # so loop through to recreate function lineage until all datatypes are known
+
+        while not passed:
+            new_passed = True
+            for x, y in self.environment.concepts.items():
+                if y.datatype == DataType.UNKNOWN and y.lineage:
+                    self.environment.concepts[x] = rehydrate_concept_lineage(
+                        y, self.environment, self.function_factory
+                    )
+                    new_passed = False
+            passes += 1
+            if passes > MAX_PARSE_DEPTH:
+                break
+            passed = new_passed
+
         return reparsed
 
     def start(self, args):
@@ -1075,7 +1142,7 @@ class ParseToObjects(Transformer):
         if not select_items:
             raise ValueError("Malformed select, missing select items")
 
-        return SelectStatement.from_inputs(
+        base = SelectStatement.from_inputs(
             environment=self.environment,
             selection=select_items,
             order_by=order_by,
@@ -1084,6 +1151,7 @@ class ParseToObjects(Transformer):
             limit=limit,
             meta=Metadata(line_number=meta.line),
         )
+        return base
 
     @v_args(meta=True)
     def address(self, meta: Meta, args):

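The inline comments in the hunk above describe the intent: after the second parse pass some concepts still carry `DataType.UNKNOWN` because their lineage was built before upstream types were known, so the parser now loops, rebuilding lineage for unknown-typed concepts until a pass makes no further progress (capped by MAX_PARSE_DEPTH). A self-contained sketch of that fixed-point idea on toy data (not the parser's real objects):

UNKNOWN = "unknown"
MAX_PASSES = 10

# concept -> currently known datatype, and concept -> upstream concepts it derives from
concepts = {"revenue": "float::money", "scaled": UNKNOWN, "total": UNKNOWN}
lineage = {"scaled": ["revenue"], "total": ["scaled"]}

passed, passes = False, 0
while not passed:
    new_passed = True
    for name, datatype in concepts.items():
        if datatype == UNKNOWN and name in lineage:
            upstream = [concepts[parent] for parent in lineage[name]]
            if all(t != UNKNOWN for t in upstream):
                concepts[name] = upstream[0]  # stand-in for rehydrate_concept_lineage
                new_passed = False
    passes += 1
    if passes > MAX_PASSES:
        break
    passed = new_passed

print(concepts)  # every concept ends up typed float::money
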
pytrilogy-0.0.3.15/tests/test_typing.py (removed)
@@ -1,42 +0,0 @@
-from trilogy import Dialects
-
-
-def test_typing():
-    env = Dialects.DUCK_DB.default_executor()
-    env.environment.parse(
-        """
-
-type email string;
-
-key customer_id int;
-property customer_id.email string::email;
-
-def is_valid_email(email) -> contains(email, '@');
-
-datasource customers (
-    id:customer_id,
-    email: email
-)
-grain (customer_id)
-query '''
-select 1 as id, 'bright@gmail.com' as email
-union all
-select 2 as id, 'funky@hotmail.com' as email
-''';
-
-
-"""
-    )
-
-    results = env.execute_query(
-        """SELECT
-    customer_id,
-    @is_valid_email(email)->valid;"""
-    )
-
-    for row in results.fetchall():
-        assert row.valid is True
-
-    assert "email" in env.environment.data_types
-
-    assert env.environment.concepts["email"].datatype.traits == ["email"]

All remaining files are carried over without content changes; they are renamed only by the version prefix ({pytrilogy-0.0.3.15 → pytrilogy-0.0.3.17}) as listed in the summary above.