pytrilogy 0.0.1.102__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pytrilogy might be problematic. Click here for more details.
- pytrilogy-0.0.1.102.dist-info/LICENSE.md +19 -0
- pytrilogy-0.0.1.102.dist-info/METADATA +277 -0
- pytrilogy-0.0.1.102.dist-info/RECORD +77 -0
- pytrilogy-0.0.1.102.dist-info/WHEEL +5 -0
- pytrilogy-0.0.1.102.dist-info/entry_points.txt +2 -0
- pytrilogy-0.0.1.102.dist-info/top_level.txt +1 -0
- trilogy/__init__.py +8 -0
- trilogy/compiler.py +0 -0
- trilogy/constants.py +30 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +3 -0
- trilogy/core/enums.py +270 -0
- trilogy/core/env_processor.py +33 -0
- trilogy/core/environment_helpers.py +156 -0
- trilogy/core/ergonomics.py +187 -0
- trilogy/core/exceptions.py +23 -0
- trilogy/core/functions.py +320 -0
- trilogy/core/graph_models.py +55 -0
- trilogy/core/internal.py +37 -0
- trilogy/core/models.py +3145 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +603 -0
- trilogy/core/processing/graph_utils.py +44 -0
- trilogy/core/processing/node_generators/__init__.py +25 -0
- trilogy/core/processing/node_generators/basic_node.py +71 -0
- trilogy/core/processing/node_generators/common.py +239 -0
- trilogy/core/processing/node_generators/concept_merge.py +152 -0
- trilogy/core/processing/node_generators/filter_node.py +83 -0
- trilogy/core/processing/node_generators/group_node.py +92 -0
- trilogy/core/processing/node_generators/group_to_node.py +99 -0
- trilogy/core/processing/node_generators/merge_node.py +148 -0
- trilogy/core/processing/node_generators/multiselect_node.py +189 -0
- trilogy/core/processing/node_generators/rowset_node.py +130 -0
- trilogy/core/processing/node_generators/select_node.py +328 -0
- trilogy/core/processing/node_generators/unnest_node.py +37 -0
- trilogy/core/processing/node_generators/window_node.py +85 -0
- trilogy/core/processing/nodes/__init__.py +76 -0
- trilogy/core/processing/nodes/base_node.py +251 -0
- trilogy/core/processing/nodes/filter_node.py +49 -0
- trilogy/core/processing/nodes/group_node.py +110 -0
- trilogy/core/processing/nodes/merge_node.py +326 -0
- trilogy/core/processing/nodes/select_node_v2.py +198 -0
- trilogy/core/processing/nodes/unnest_node.py +54 -0
- trilogy/core/processing/nodes/window_node.py +34 -0
- trilogy/core/processing/utility.py +278 -0
- trilogy/core/query_processor.py +331 -0
- trilogy/dialect/__init__.py +0 -0
- trilogy/dialect/base.py +679 -0
- trilogy/dialect/bigquery.py +80 -0
- trilogy/dialect/common.py +43 -0
- trilogy/dialect/config.py +55 -0
- trilogy/dialect/duckdb.py +83 -0
- trilogy/dialect/enums.py +95 -0
- trilogy/dialect/postgres.py +86 -0
- trilogy/dialect/presto.py +82 -0
- trilogy/dialect/snowflake.py +82 -0
- trilogy/dialect/sql_server.py +89 -0
- trilogy/docs/__init__.py +0 -0
- trilogy/engine.py +48 -0
- trilogy/executor.py +242 -0
- trilogy/hooks/__init__.py +0 -0
- trilogy/hooks/base_hook.py +37 -0
- trilogy/hooks/graph_hook.py +24 -0
- trilogy/hooks/query_debugger.py +133 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +176 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +2 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +1951 -0
- trilogy/parsing/render.py +483 -0
- trilogy/py.typed +0 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/trilogy.py +127 -0
- trilogy/utility.py +31 -0
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
from typing import Mapping, Callable, Any
|
|
2
|
+
|
|
3
|
+
from jinja2 import Template
|
|
4
|
+
from trilogy.utility import string_to_hash
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
from trilogy.core.enums import FunctionType, WindowType
|
|
8
|
+
from trilogy.core.models import (
|
|
9
|
+
ProcessedQuery,
|
|
10
|
+
ProcessedQueryPersist,
|
|
11
|
+
ProcessedShowStatement,
|
|
12
|
+
)
|
|
13
|
+
from trilogy.dialect.base import BaseDialect
|
|
14
|
+
|
|
15
|
+
# No SQL Server-specific window-function overrides yet; this empty map is
# merged over BaseDialect.WINDOW_FUNCTION_MAP on the dialect class below.
WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
|
|
16
|
+
|
|
17
|
+
# SQL Server renderings for Trilogy function types. Each value takes the list
# of already-rendered argument strings and returns a SQL expression fragment.
FUNCTION_MAP = {
    FunctionType.COUNT: lambda args: f"count({args[0]})",
    FunctionType.SUM: lambda args: f"sum({args[0]})",
    FunctionType.AVG: lambda args: f"avg({args[0]})",
    FunctionType.LENGTH: lambda args: f"length({args[0]})",
    # T-SQL LIKE is a predicate, not an expression; wrap in CASE to yield a value.
    FunctionType.LIKE: lambda args: (
        f" CASE WHEN {args[0]} like {args[1]} THEN True ELSE False END"
    ),
    FunctionType.CONCAT: lambda args: (
        f"CONCAT({','.join([f''' '{a}' ''' for a in args])})"
    ),
}
|
|
29
|
+
|
|
30
|
+
# if an aggregate function is called on a source that is at the same grain as the aggregate
# we may return a static value
FUNCTION_GRAIN_MATCH_MAP = {
    **FUNCTION_MAP,
    # At matching grain each group contains exactly one row, so aggregates
    # collapse: count -> constant 1, sum/avg -> the value itself.
    FunctionType.COUNT: lambda args: "1",
    FunctionType.SUM: lambda args: f"{args[0]}",
    FunctionType.AVG: lambda args: f"{args[0]}",
}
|
|
38
|
+
|
|
39
|
+
# Jinja2 template for a full T-SQL statement. Differs from the ANSI base
# template mainly in using TOP-n after SELECT instead of a trailing LIMIT.
TSQL_TEMPLATE = Template(
    """{%- if ctes %}
WITH {% for cte in ctes %}
{{cte.name}} as ({{cte.statement}}){% if not loop.last %},{% endif %}{% endfor %}{% endif %}
SELECT
{%- if limit is not none %}
TOP {{ limit }}{% endif %}
{%- for select in select_columns %}
{{ select }}{% if not loop.last %},{% endif %}{% endfor %}
{% if base %}FROM
{{ base }}{% endif %}{% if joins %}
{% for join in joins %}
{{ join }}
{% endfor %}{% endif %}
{% if where %}WHERE
{{ where }}
{% endif %}
{%- if group_by %}
GROUP BY {% for group in group_by %}
{{group}}{% if not loop.last %},{% endif %}
{% endfor %}{% endif %}
{%- if order_by %}
ORDER BY {% for order in order_by %}
{{ order }}{% if not loop.last %},{% endif %}
{% endfor %}{% endif %}
"""
)
|
|
66
|
+
|
|
67
|
+
# SQL Server caps identifier names (including CTE names) at 128 characters.
MAX_IDENTIFIER_LENGTH = 128
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class SqlServerDialect(BaseDialect):
    """Dialect for Microsoft SQL Server (T-SQL).

    Overlays SQL Server-specific function renderings on the base dialect and
    swaps in the TOP-n based SQL template. Also rewrites CTE names that exceed
    SQL Server's 128-character identifier limit.
    """

    WINDOW_FUNCTION_MAP = {**BaseDialect.WINDOW_FUNCTION_MAP, **WINDOW_FUNCTION_MAP}
    FUNCTION_MAP = {**BaseDialect.FUNCTION_MAP, **FUNCTION_MAP}
    FUNCTION_GRAIN_MATCH_MAP = {
        **BaseDialect.FUNCTION_GRAIN_MATCH_MAP,
        **FUNCTION_GRAIN_MATCH_MAP,
    }
    QUOTE_CHARACTER = '"'
    SQL_TEMPLATE = TSQL_TEMPLATE

    def compile_statement(
        self, query: ProcessedQuery | ProcessedQueryPersist | ProcessedShowStatement
    ) -> str:
        """Compile a processed statement to SQL, replacing any CTE name that is
        too long for SQL Server with a deterministic hash-based alias."""
        base = super().compile_statement(query)
        # BUG FIX: the original checked isinstance(base, ...), but `base` is the
        # compiled SQL *string*, so the rename branch could never run. The type
        # check belongs on `query`, which carries the CTE metadata.
        if isinstance(query, (ProcessedQuery, ProcessedQueryPersist)):
            for cte in query.ctes:
                if len(cte.name) > MAX_IDENTIFIER_LENGTH:
                    new_name = f"rhash_{string_to_hash(cte.name)}"
                    base = base.replace(cte.name, new_name)
        return base
|
trilogy/docs/__init__.py
ADDED
|
File without changes
|
trilogy/engine.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from sqlalchemy.engine import Engine, Connection, CursorResult
|
|
2
|
+
from typing import Protocol
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class EngineResult(Protocol):
    """Structural type for the result of executing a statement."""

    # IDIOM FIX: removed the stray class-level `pass` that preceded the method;
    # Protocol method stubs use `...` rather than `pass`.
    def fetchall(self) -> list[tuple]:
        """Return all remaining rows of the result set."""
        ...
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class EngineConnection(Protocol):
    """Structural type for an open connection that can execute SQL text."""

    # IDIOM FIX: removed the stray class-level `pass` that preceded the method;
    # Protocol method stubs use `...` rather than `pass`.
    def execute(self, statement: str) -> EngineResult:
        """Execute a SQL statement and return its result."""
        ...
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ExecutionEngine(Protocol):
    """Structural type for an engine that can open connections."""

    # IDIOM FIX: removed the stray class-level `pass` that preceded the method;
    # Protocol method stubs use `...` rather than `pass`.
    def connect(self) -> EngineConnection:
        """Open and return a new connection."""
        ...
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
### Begin default SQLAlchemy implementation
|
|
27
|
+
class SqlAlchemyResult(EngineResult):
    """EngineResult backed by a SQLAlchemy CursorResult."""

    def __init__(self, result: CursorResult):
        # Hold the underlying cursor result and delegate to it.
        self.result = result

    def fetchall(self):
        """Return all remaining rows from the wrapped cursor result."""
        return self.result.fetchall()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class SqlAlchemyConnection(EngineConnection):
    """EngineConnection backed by a SQLAlchemy Connection."""

    def __init__(self, connection: Connection):
        # Hold the underlying connection and delegate execution to it.
        self.connection = connection

    def execute(self, statement: str) -> SqlAlchemyResult:
        """Execute *statement* and wrap the cursor in a SqlAlchemyResult."""
        raw = self.connection.execute(statement)
        return SqlAlchemyResult(raw)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class SqlAlchemyEngine(ExecutionEngine):
    """ExecutionEngine backed by a SQLAlchemy Engine."""

    def __init__(self, engine: Engine):
        # Hold the underlying engine; connections are opened on demand.
        self.engine = engine

    def connect(self) -> SqlAlchemyConnection:
        """Open a new connection and wrap it in a SqlAlchemyConnection."""
        raw = self.engine.connect()
        return SqlAlchemyConnection(raw)
|
trilogy/executor.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
from typing import List, Optional, Any
|
|
2
|
+
from functools import singledispatchmethod
|
|
3
|
+
from sqlalchemy import text
|
|
4
|
+
from sqlalchemy.engine import Engine, CursorResult
|
|
5
|
+
|
|
6
|
+
from trilogy.constants import logger
|
|
7
|
+
from trilogy.core.models import (
|
|
8
|
+
Environment,
|
|
9
|
+
ProcessedQuery,
|
|
10
|
+
ProcessedShowStatement,
|
|
11
|
+
ProcessedQueryPersist,
|
|
12
|
+
MultiSelectStatement,
|
|
13
|
+
SelectStatement,
|
|
14
|
+
PersistStatement,
|
|
15
|
+
ShowStatement,
|
|
16
|
+
Concept,
|
|
17
|
+
)
|
|
18
|
+
from trilogy.dialect.base import BaseDialect
|
|
19
|
+
from trilogy.dialect.enums import Dialects
|
|
20
|
+
from trilogy.parser import parse_text
|
|
21
|
+
from trilogy.hooks.base_hook import BaseHook
|
|
22
|
+
|
|
23
|
+
from dataclasses import dataclass
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@dataclass
class MockResult:
    """In-memory stand-in for a database cursor result.

    Mimics the minimal CursorResult surface (fetchall / keys) so locally
    answered statements (e.g. SHOW) can be returned through the same code
    paths as real query results.
    """

    values: list[Any]  # the pre-computed result rows
    columns: list[str]  # the column names, parallel to each row's keys

    def fetchall(self):
        """Return every row."""
        return self.values

    def keys(self):
        """Return the column names."""
        return self.columns
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def generate_result_set(columns: List[Concept], output_data: list[Any]) -> MockResult:
    """Build a MockResult pairing concept-derived column names with rows.

    Column names are concept addresses with dots flattened to underscores.
    """
    names = [concept.address.replace(".", "_") for concept in columns]
    # NOTE(review): zip(names, [row]) pairs only the FIRST column name with the
    # entire row value. That matches the current single-column callers (show
    # statements returning one rendered-SQL string per row) — confirm before
    # relying on this for multi-column output.
    rows = []
    for row in output_data:
        rows.append(dict(zip(names, [row])))
    return MockResult(values=rows, columns=names)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class Executor(object):
    """Executes Trilogy statements against a SQL backend.

    Binds a dialect-specific SQL generator to a SQLAlchemy engine and keeps a
    single connection plus a mutable Environment for the session. Persist
    statements register their target datasource back into the environment so
    later queries can read from it.
    """

    def __init__(
        self,
        dialect: Dialects,
        engine: Engine,
        environment: Optional[Environment] = None,
        hooks: List[BaseHook] | None = None,
    ):
        self.dialect: Dialects = dialect
        self.engine = engine
        self.environment = environment or Environment()
        self.generator: BaseDialect
        self.logger = logger
        self.hooks = hooks
        # Dialect modules are imported lazily so optional backend dependencies
        # are only required for the dialect actually in use.
        if self.dialect == Dialects.BIGQUERY:
            from trilogy.dialect.bigquery import BigqueryDialect

            self.generator = BigqueryDialect()
        elif self.dialect == Dialects.SQL_SERVER:
            from trilogy.dialect.sql_server import SqlServerDialect

            self.generator = SqlServerDialect()
        elif self.dialect == Dialects.DUCK_DB:
            from trilogy.dialect.duckdb import DuckDBDialect

            self.generator = DuckDBDialect()
        elif self.dialect == Dialects.PRESTO:
            from trilogy.dialect.presto import PrestoDialect

            self.generator = PrestoDialect()
        elif self.dialect == Dialects.TRINO:
            from trilogy.dialect.presto import TrinoDialect

            self.generator = TrinoDialect()
        elif self.dialect == Dialects.POSTGRES:
            from trilogy.dialect.postgres import PostgresDialect

            self.generator = PostgresDialect()
        elif self.dialect == Dialects.SNOWFLAKE:
            from trilogy.dialect.snowflake import SnowflakeDialect

            self.generator = SnowflakeDialect()
        else:
            raise ValueError(f"Unsupported dialect {self.dialect}")
        self.connection = self.engine.connect()

    def execute_statement(self, statement) -> Optional[CursorResult]:
        """Execute one processed statement; non-executable types return None."""
        if not isinstance(statement, (ProcessedQuery, ProcessedQueryPersist)):
            return None
        return self.execute_query(statement)

    @singledispatchmethod
    def execute_query(self, query) -> CursorResult:
        """Dispatch execution by statement type; unknown types are an error."""
        raise NotImplementedError("Cannot execute type {}".format(type(query)))

    @execute_query.register
    def _(self, query: SelectStatement | PersistStatement) -> CursorResult:
        # Unprocessed statements are first lowered to processed form, then
        # re-dispatched to the processed-query handler below.
        sql = self.generator.generate_queries(
            self.environment, [query], hooks=self.hooks
        )
        return self.execute_query(sql[0])

    @execute_query.register
    def _(self, query: ProcessedShowStatement) -> CursorResult:
        # Show statements are answered locally with a mocked result set of
        # rendered SQL rather than being sent to the database.
        return generate_result_set(
            query.output_columns,
            [
                self.generator.compile_statement(x)
                for x in query.output_values
                if isinstance(x, ProcessedQuery)
            ],
        )

    @execute_query.register
    def _(self, query: ProcessedQuery | ProcessedQueryPersist) -> CursorResult:
        sql = self.generator.compile_statement(query)
        output = self.connection.execute(text(sql))
        # Persisted outputs become queryable datasources for later statements.
        if isinstance(query, ProcessedQueryPersist):
            self.environment.add_datasource(query.datasource)
        return output

    @singledispatchmethod
    def generate_sql(self, command: ProcessedQuery | str) -> list[str]:
        """Dispatch SQL generation by command type; unknown types are an error."""
        raise NotImplementedError(
            "Cannot generate sql for type {}".format(type(command))
        )

    @generate_sql.register  # type: ignore
    def _(self, command: ProcessedQuery) -> List[str]:
        return [self.generator.compile_statement(command)]

    @generate_sql.register  # type: ignore
    def _(self, command: MultiSelectStatement) -> List[str]:
        output = []
        sql = self.generator.generate_queries(
            self.environment, [command], hooks=self.hooks
        )
        for statement in sql:
            compiled_sql = self.generator.compile_statement(statement)
            output.append(compiled_sql)
        # BUG FIX: the original appended compiled_sql once more after the loop,
        # duplicating the final statement in the returned list.
        return output

    @generate_sql.register  # type: ignore
    def _(self, command: SelectStatement) -> List[str]:
        output = []
        sql = self.generator.generate_queries(
            self.environment, [command], hooks=self.hooks
        )
        for statement in sql:
            compiled_sql = self.generator.compile_statement(statement)
            output.append(compiled_sql)
        return output

    @generate_sql.register  # type: ignore
    def _(self, command: str) -> List[str]:
        """generate SQL for execution"""
        _, parsed = parse_text(command, self.environment)
        generatable = [
            x for x in parsed if isinstance(x, (SelectStatement, PersistStatement))
        ]
        sql = self.generator.generate_queries(
            self.environment, generatable, hooks=self.hooks
        )
        output = []
        for statement in sql:
            # Show statements have no standalone SQL representation.
            if isinstance(statement, ProcessedShowStatement):
                continue
            compiled_sql = self.generator.compile_statement(statement)
            output.append(compiled_sql)
        return output

    def parse_text(
        self, command: str, persist: bool = False
    ) -> List[ProcessedQuery | ProcessedQueryPersist | ProcessedShowStatement]:
        """Process a preql text command"""
        _, parsed = parse_text(command, self.environment)
        generatable = [
            x
            for x in parsed
            if isinstance(
                x,
                (
                    SelectStatement,
                    PersistStatement,
                    MultiSelectStatement,
                    ShowStatement,
                ),
            )
        ]
        sql = []
        # Statements are processed one at a time so that, when persist=True,
        # an earlier persist's datasource is visible to later statements.
        while generatable:
            t = generatable.pop(0)
            x = self.generator.generate_queries(
                self.environment, [t], hooks=self.hooks
            )[0]
            if persist and isinstance(x, ProcessedQueryPersist):
                self.environment.add_datasource(x.datasource)
            sql.append(x)
        return sql

    def execute_raw_sql(self, command: str) -> CursorResult:
        """Run a command against the raw underlying
        execution engine"""
        return self.connection.execute(text(command))

    def execute_text(self, command: str) -> List[CursorResult]:
        """Run a preql text command"""
        sql = self.parse_text(command)
        output = []
        for statement in sql:
            # Show statements are answered locally with a mock result set.
            if isinstance(statement, ProcessedShowStatement):
                output.append(
                    generate_result_set(
                        statement.output_columns,
                        [
                            self.generator.compile_statement(x)
                            for x in statement.output_values
                            if isinstance(x, ProcessedQuery)
                        ],
                    )
                )
                continue
            compiled_sql = self.generator.compile_statement(statement)
            logger.debug(compiled_sql)

            output.append(self.connection.execute(text(compiled_sql)))
            # generalize post-run success hooks
            if isinstance(statement, ProcessedQueryPersist):
                self.environment.add_datasource(statement.datasource)
        return output
|
|
File without changes
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from trilogy.core.models import (
|
|
2
|
+
QueryDatasource,
|
|
3
|
+
CTE,
|
|
4
|
+
SelectStatement,
|
|
5
|
+
PersistStatement,
|
|
6
|
+
MultiSelectStatement,
|
|
7
|
+
RowsetDerivationStatement,
|
|
8
|
+
)
|
|
9
|
+
from trilogy.core.processing.nodes import StrategyNode
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class BaseHook:
    """No-op lifecycle hook for query planning/compilation.

    Subclasses override individual callbacks to observe or print intermediate
    planning state; the defaults either print a short summary or do nothing.
    """

    # IDIOM FIX: removed the stray class-level `pass` that preceded the methods.

    def process_multiselect_info(self, select: MultiSelectStatement):
        """Print a summary for each component select of a multiselect."""
        print("Multiselect with components:")
        for x in select.selects:
            self.process_select_info(x)

    def process_select_info(self, select: SelectStatement):
        """Print the grain of a select statement."""
        print(f"Select statement grain: {str(select.grain)}")

    def process_persist_info(self, persist: PersistStatement):
        """Print the persist target, then summarize its inner select."""
        print(f"Persist statement persisting to {persist.address}")
        self.process_select_info(persist.select)

    def process_rowset_info(self, rowset: RowsetDerivationStatement):
        """Print the grain of a rowset derivation's select."""
        print(f"Rowset statement with grain {str(rowset.select.grain)}")

    def process_root_datasource(self, datasource: QueryDatasource):
        """Called with the resolved root datasource; no-op by default."""
        pass

    def process_root_cte(self, cte: CTE):
        """Called with the root CTE; no-op by default."""
        pass

    def process_root_strategy_node(self, node: StrategyNode):
        """Called with the root strategy node; no-op by default."""
        pass
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from trilogy.hooks.base_hook import BaseHook
|
|
2
|
+
from networkx import DiGraph
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class GraphHook(BaseHook):
    """Hook that renders the built query graph with matplotlib."""

    def __init__(self):
        super().__init__()
        # BUG FIX: the original wrapped a bare `pass` in try/except ImportError,
        # so the error below could never fire. Actually probe the optional
        # dependencies here so failures surface at construction time.
        try:
            import matplotlib  # noqa: F401
            import scipy  # noqa: F401
        except ImportError as e:
            raise ImportError(
                "GraphHook requires matplotlib and scipy to be installed"
            ) from e

    def query_graph_built(self, graph: DiGraph):
        """Draw *graph* (minus internal bookkeeping nodes) and show it."""
        from networkx import draw_kamada_kawai
        from matplotlib import pyplot as plt

        # Work on a copy so the caller's graph is not mutated.
        graph = graph.copy()
        nodes = [*graph.nodes]
        for node in nodes:
            # Internal concepts only clutter the rendering.
            if "__preql_internal" in node:
                graph.remove_node(node)
        draw_kamada_kawai(graph, with_labels=True, connectionstyle="arc3, rad = 0.1")
        plt.show()
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
from typing import Union
|
|
2
|
+
from trilogy.core.models import QueryDatasource, CTE, Datasource, SelectStatement
|
|
3
|
+
|
|
4
|
+
from trilogy.hooks.base_hook import BaseHook
|
|
5
|
+
from trilogy.constants import logger
|
|
6
|
+
from logging import StreamHandler, DEBUG
|
|
7
|
+
from trilogy.core.processing.nodes import StrategyNode
|
|
8
|
+
|
|
9
|
+
from trilogy.dialect.bigquery import BigqueryDialect
|
|
10
|
+
|
|
11
|
+
from enum import Enum
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class PrintMode(Enum):
    # Verbosity for debug printing. Values are chosen so plain booleans coerce
    # via PrintMode(flag): False -> OFF, True -> BASIC. FULL must be requested
    # explicitly with PrintMode.FULL (value 3).
    OFF = False
    BASIC = True
    FULL = 3
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
renderer = BigqueryDialect()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def print_recursive_resolved(
    input: Union[QueryDatasource, Datasource], mode: PrintMode, depth: int = 0
):
    """Return display rows for *input* and, depth-first, its source datasources.

    Each row is a tuple of printable parts; indentation encodes nesting depth.
    """
    tags: list = []
    if isinstance(input, QueryDatasource):
        # Tag the operations this level performs.
        for active, label in (
            (input.joins, "join"),
            (input.condition, "filter"),
            (input.group_required, "group"),
        ):
            if active:
                tags.append(label)
    display = [
        (
            " " * depth,
            input.__class__.__name__,
            "<",
            ",".join(tags),
            ">",
            "->",
            [c.address for c in input.output_concepts],
        )
    ]
    if isinstance(input, QueryDatasource):
        # Base Datasources have no children; only query datasources recurse.
        for child in input.datasources:
            display += print_recursive_resolved(child, mode=mode, depth=depth + 1)
    return display
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def print_recursive_nodes(
    input: StrategyNode, mode: PrintMode = PrintMode.BASIC, depth: int = 0
):
    """Return display rows for a strategy node and, recursively, its parents.

    FULL mode includes the resolved output concepts; BASIC shows node and grain.
    """
    resolved = input.resolve()
    # BUG FIX: the original only assigned `display` inside the FULL/BASIC
    # branches, so any other mode (e.g. OFF) raised NameError at the append
    # below. Default to no rows instead.
    display: list = []
    if mode == PrintMode.FULL:
        display = [
            [
                " " * depth,
                input,
                "->",
                resolved.grain,
                "->",
                [c.address for c in resolved.output_concepts],
            ]
        ]
    elif mode == PrintMode.BASIC:
        display = [
            [
                " " * depth,
                input,
                "->",
                resolved.grain,
            ]
        ]
    for child in input.parents:
        display += print_recursive_nodes(child, mode=mode, depth=depth + 1)
    return display
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def print_recursive_ctes(input: CTE, depth: int = 0, max_depth: int | None = None):
    """Print a CTE summary plus its rendered SQL, then recurse into parents.

    Stops descending once *max_depth* (if given) is exceeded. Summary lines go
    to stdout; the rendered SQL is logged at debug level.
    """
    if max_depth and depth > max_depth:
        return
    select_statement = [c.address for c in input.output_columns]
    print(" " * depth, input.name, "->", input.group_to_grain, "->", select_statement)
    sql = renderer.render_cte(input).statement
    indent = " " * depth
    for line in sql.split("\n"):
        logger.debug(indent + line)
    print("-----")
    if isinstance(input, CTE):
        for child in input.parent_ctes:
            print_recursive_ctes(child, depth + 1)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class DebuggingHook(BaseHook):
    """Hook that prints intermediate query-planning state for debugging.

    Each stage (CTEs, strategy nodes, datasources, other) can be toggled with
    a bool (off/basic) or given an explicit PrintMode for more detail.
    """

    def __init__(
        self,
        level=DEBUG,
        max_depth: int | None = None,
        process_ctes: PrintMode | bool = True,
        process_nodes: PrintMode | bool = True,
        process_datasources: PrintMode | bool = True,
        process_other: bool = True,
    ):
        # Make debug output visible without stacking duplicate handlers.
        if not any(isinstance(handler, StreamHandler) for handler in logger.handlers):
            logger.addHandler(StreamHandler())
        logger.setLevel(level)

        self.max_depth = max_depth
        # Booleans coerce to PrintMode.OFF / PrintMode.BASIC.
        self.process_ctes = PrintMode(process_ctes)
        self.process_nodes = PrintMode(process_nodes)
        self.process_datasources = PrintMode(process_datasources)
        self.process_other = PrintMode(process_other)

    def process_select_info(self, select: SelectStatement):
        # NOTE(review): this gates on process_datasources rather than
        # process_other — looks like a copy-paste; confirm which flag should
        # control select-info printing.
        if self.process_datasources != PrintMode.OFF:
            print(f"grain: {str(select.grain)}")

    def process_root_datasource(self, datasource: QueryDatasource):
        if self.process_datasources != PrintMode.OFF:
            rows = print_recursive_resolved(datasource, self.process_datasources)
            for row in rows:
                print("".join([str(v) for v in row]))

    def process_root_cte(self, cte: CTE):
        if self.process_ctes != PrintMode.OFF:
            print_recursive_ctes(cte, max_depth=self.max_depth)

    def process_root_strategy_node(self, node: StrategyNode):
        if self.process_nodes != PrintMode.OFF:
            rows = print_recursive_nodes(node, mode=self.process_nodes)
            for row in rows:
                print("".join([str(v) for v in row]))
|
|
File without changes
|
trilogy/parser.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
from trilogy.core.models import Environment
|
|
4
|
+
from trilogy.parsing.parse_engine import parse_text
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def parse(
    input: str, environment: Optional[Environment] = None
) -> tuple[Environment, list]:
    """Parse Trilogy text into (environment, statements).

    Thin convenience wrapper over the parse engine's parse_text; a fresh
    environment is created there when none is supplied.
    """
    parsed_environment, statements = parse_text(input, environment=environment)
    return parsed_environment, statements
|
|
File without changes
|