pytrilogy 0.0.2.46__py3-none-any.whl → 0.0.2.48__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic; consult the advisory on the package registry for more details.

Files changed (69)
  1. {pytrilogy-0.0.2.46.dist-info → pytrilogy-0.0.2.48.dist-info}/METADATA +1 -1
  2. pytrilogy-0.0.2.48.dist-info/RECORD +85 -0
  3. trilogy/__init__.py +2 -2
  4. trilogy/constants.py +4 -2
  5. trilogy/core/enums.py +7 -1
  6. trilogy/core/env_processor.py +1 -2
  7. trilogy/core/environment_helpers.py +5 -5
  8. trilogy/core/functions.py +11 -10
  9. trilogy/core/internal.py +2 -3
  10. trilogy/core/models.py +448 -394
  11. trilogy/core/optimization.py +37 -21
  12. trilogy/core/optimizations/__init__.py +1 -1
  13. trilogy/core/optimizations/base_optimization.py +6 -6
  14. trilogy/core/optimizations/inline_constant.py +7 -4
  15. trilogy/core/optimizations/inline_datasource.py +14 -5
  16. trilogy/core/optimizations/predicate_pushdown.py +20 -10
  17. trilogy/core/processing/concept_strategies_v3.py +40 -24
  18. trilogy/core/processing/graph_utils.py +2 -3
  19. trilogy/core/processing/node_generators/__init__.py +7 -5
  20. trilogy/core/processing/node_generators/basic_node.py +4 -4
  21. trilogy/core/processing/node_generators/common.py +10 -11
  22. trilogy/core/processing/node_generators/filter_node.py +7 -9
  23. trilogy/core/processing/node_generators/group_node.py +10 -11
  24. trilogy/core/processing/node_generators/group_to_node.py +5 -5
  25. trilogy/core/processing/node_generators/multiselect_node.py +10 -12
  26. trilogy/core/processing/node_generators/node_merge_node.py +7 -9
  27. trilogy/core/processing/node_generators/rowset_node.py +9 -8
  28. trilogy/core/processing/node_generators/select_merge_node.py +11 -10
  29. trilogy/core/processing/node_generators/select_node.py +5 -5
  30. trilogy/core/processing/node_generators/union_node.py +75 -0
  31. trilogy/core/processing/node_generators/unnest_node.py +2 -3
  32. trilogy/core/processing/node_generators/window_node.py +3 -4
  33. trilogy/core/processing/nodes/__init__.py +9 -5
  34. trilogy/core/processing/nodes/base_node.py +17 -13
  35. trilogy/core/processing/nodes/filter_node.py +3 -4
  36. trilogy/core/processing/nodes/group_node.py +8 -10
  37. trilogy/core/processing/nodes/merge_node.py +11 -11
  38. trilogy/core/processing/nodes/select_node_v2.py +8 -9
  39. trilogy/core/processing/nodes/union_node.py +50 -0
  40. trilogy/core/processing/nodes/unnest_node.py +2 -3
  41. trilogy/core/processing/nodes/window_node.py +2 -3
  42. trilogy/core/processing/utility.py +37 -40
  43. trilogy/core/query_processor.py +68 -44
  44. trilogy/dialect/base.py +95 -53
  45. trilogy/dialect/bigquery.py +2 -3
  46. trilogy/dialect/common.py +5 -4
  47. trilogy/dialect/config.py +0 -2
  48. trilogy/dialect/duckdb.py +2 -2
  49. trilogy/dialect/enums.py +5 -5
  50. trilogy/dialect/postgres.py +2 -2
  51. trilogy/dialect/presto.py +3 -4
  52. trilogy/dialect/snowflake.py +2 -2
  53. trilogy/dialect/sql_server.py +3 -4
  54. trilogy/engine.py +2 -1
  55. trilogy/executor.py +43 -30
  56. trilogy/hooks/base_hook.py +5 -4
  57. trilogy/hooks/graph_hook.py +2 -1
  58. trilogy/hooks/query_debugger.py +18 -8
  59. trilogy/parsing/common.py +15 -20
  60. trilogy/parsing/parse_engine.py +124 -88
  61. trilogy/parsing/render.py +32 -35
  62. trilogy/parsing/trilogy.lark +8 -1
  63. trilogy/scripts/trilogy.py +6 -4
  64. trilogy/utility.py +1 -1
  65. pytrilogy-0.0.2.46.dist-info/RECORD +0 -83
  66. {pytrilogy-0.0.2.46.dist-info → pytrilogy-0.0.2.48.dist-info}/LICENSE.md +0 -0
  67. {pytrilogy-0.0.2.46.dist-info → pytrilogy-0.0.2.48.dist-info}/WHEEL +0 -0
  68. {pytrilogy-0.0.2.46.dist-info → pytrilogy-0.0.2.48.dist-info}/entry_points.txt +0 -0
  69. {pytrilogy-0.0.2.46.dist-info → pytrilogy-0.0.2.48.dist-info}/top_level.txt +0 -0
@@ -1,43 +1,41 @@
1
- from typing import List, Optional, Set, Union, Dict, Tuple
1
+ from collections import defaultdict
2
+ from math import ceil
3
+ from typing import Dict, List, Optional, Set, Tuple, Union
2
4
 
5
+ from trilogy.constants import CONFIG, logger
6
+ from trilogy.core.constants import CONSTANT_DATASET
7
+ from trilogy.core.enums import BooleanOperator, SourceType
3
8
  from trilogy.core.env_processor import generate_graph
9
+ from trilogy.core.ergonomics import generate_cte_names
4
10
  from trilogy.core.graph_models import ReferenceGraph
5
- from trilogy.core.constants import CONSTANT_DATASET
6
- from trilogy.core.processing.concept_strategies_v3 import source_query_concepts
7
- from trilogy.core.enums import BooleanOperator
8
- from trilogy.constants import CONFIG
9
- from trilogy.core.processing.nodes import SelectNode, StrategyNode, History
10
11
  from trilogy.core.models import (
12
+ CTE,
13
+ BaseJoin,
11
14
  Concept,
12
- Environment,
13
- PersistStatement,
14
15
  ConceptDeclarationStatement,
15
- SelectStatement,
16
- MultiSelectStatement,
17
- CTE,
16
+ Conditional,
17
+ CopyStatement,
18
+ CTEConceptPair,
19
+ Datasource,
20
+ Environment,
21
+ InstantiatedUnnestJoin,
18
22
  Join,
19
- UnnestJoin,
20
23
  MaterializedDataset,
24
+ MultiSelectStatement,
25
+ PersistStatement,
26
+ ProcessedCopyStatement,
21
27
  ProcessedQuery,
22
28
  ProcessedQueryPersist,
23
29
  QueryDatasource,
24
- Datasource,
25
- BaseJoin,
26
- InstantiatedUnnestJoin,
27
- Conditional,
28
- ProcessedCopyStatement,
29
- CopyStatement,
30
- CTEConceptPair,
30
+ SelectStatement,
31
+ UnionCTE,
32
+ UnnestJoin,
31
33
  )
32
-
33
- from trilogy.utility import unique
34
-
35
- from trilogy.hooks.base_hook import BaseHook
36
- from trilogy.constants import logger
37
- from trilogy.core.ergonomics import generate_cte_names
38
34
  from trilogy.core.optimization import optimize_ctes
39
- from math import ceil
40
- from collections import defaultdict
35
+ from trilogy.core.processing.concept_strategies_v3 import source_query_concepts
36
+ from trilogy.core.processing.nodes import History, SelectNode, StrategyNode
37
+ from trilogy.hooks.base_hook import BaseHook
38
+ from trilogy.utility import unique
41
39
 
42
40
  LOGGER_PREFIX = "[QUERY BUILD]"
43
41
 
@@ -103,7 +101,7 @@ def base_join_to_join(
103
101
 
104
102
 
105
103
  def generate_source_map(
106
- query_datasource: QueryDatasource, all_new_ctes: List[CTE]
104
+ query_datasource: QueryDatasource, all_new_ctes: List[CTE | UnionCTE]
107
105
  ) -> Tuple[Dict[str, list[str]], Dict[str, list[str]]]:
108
106
  source_map: Dict[str, list[str]] = defaultdict(list)
109
107
  # now populate anything derived in this level
@@ -246,24 +244,44 @@ def resolve_cte_base_name_and_alias_v2(
246
244
  return None, None
247
245
 
248
246
 
249
- def datasource_to_ctes(
247
+ def datasource_to_cte(
250
248
  query_datasource: QueryDatasource, name_map: dict[str, str]
251
- ) -> List[CTE]:
252
- output: List[CTE] = []
253
- parents: list[CTE] = []
249
+ ) -> CTE | UnionCTE:
250
+ parents: list[CTE | UnionCTE] = []
251
+ if query_datasource.source_type == SourceType.UNION:
252
+ direct_parents: list[CTE | UnionCTE] = []
253
+ for child in query_datasource.datasources:
254
+ assert isinstance(child, QueryDatasource)
255
+ child_cte = datasource_to_cte(child, name_map=name_map)
256
+ direct_parents.append(child_cte)
257
+ parents += child_cte.parent_ctes
258
+ human_id = generate_cte_name(query_datasource.identifier, name_map)
259
+ final = UnionCTE(
260
+ name=human_id,
261
+ source=query_datasource,
262
+ parent_ctes=parents,
263
+ internal_ctes=direct_parents,
264
+ output_columns=[
265
+ c.with_grain(query_datasource.grain)
266
+ for c in query_datasource.output_concepts
267
+ ],
268
+ grain=direct_parents[0].grain,
269
+ )
270
+ return final
271
+
254
272
  if len(query_datasource.datasources) > 1 or any(
255
273
  [isinstance(x, QueryDatasource) for x in query_datasource.datasources]
256
274
  ):
257
- all_new_ctes: List[CTE] = []
275
+ all_new_ctes: List[CTE | UnionCTE] = []
258
276
  for datasource in query_datasource.datasources:
259
277
  if isinstance(datasource, QueryDatasource):
260
278
  sub_datasource = datasource
261
279
  else:
262
280
  sub_datasource = datasource_to_query_datasource(datasource)
263
281
 
264
- sub_cte = datasource_to_ctes(sub_datasource, name_map)
265
- parents += sub_cte
266
- all_new_ctes += sub_cte
282
+ sub_cte = datasource_to_cte(sub_datasource, name_map)
283
+ parents.append(sub_cte)
284
+ all_new_ctes.append(sub_cte)
267
285
  source_map, existence_map = generate_source_map(query_datasource, all_new_ctes)
268
286
 
269
287
  else:
@@ -284,7 +302,10 @@ def datasource_to_ctes(
284
302
 
285
303
  human_id = generate_cte_name(query_datasource.identifier, name_map)
286
304
 
287
- final_joins = [base_join_to_join(join, parents) for join in query_datasource.joins]
305
+ final_joins = [
306
+ base_join_to_join(join, [x for x in parents if isinstance(x, CTE)])
307
+ for join in query_datasource.joins
308
+ ]
288
309
 
289
310
  base_name, base_alias = resolve_cte_base_name_and_alias_v2(
290
311
  human_id, query_datasource, source_map, final_joins
@@ -326,8 +347,7 @@ def datasource_to_ctes(
326
347
  f"Missing {x.address} in {cte.source_map}, source map {cte.source.source_map.keys()} "
327
348
  )
328
349
 
329
- output.append(cte)
330
- return output
350
+ return cte
331
351
 
332
352
 
333
353
  def get_query_node(
@@ -336,6 +356,9 @@ def get_query_node(
336
356
  graph: Optional[ReferenceGraph] = None,
337
357
  history: History | None = None,
338
358
  ) -> StrategyNode:
359
+ environment = environment.duplicate()
360
+ for k, v in statement.local_concepts.items():
361
+ environment.concepts[k] = v
339
362
  graph = graph or generate_graph(environment)
340
363
  logger.info(
341
364
  f"{LOGGER_PREFIX} getting source datasource for query with filtering {statement.where_clause_category} and output {[str(c) for c in statement.output_components]}"
@@ -393,7 +416,7 @@ def get_query_datasources(
393
416
  return final_qds
394
417
 
395
418
 
396
- def flatten_ctes(input: CTE) -> list[CTE]:
419
+ def flatten_ctes(input: CTE | UnionCTE) -> list[CTE | UnionCTE]:
397
420
  output = [input]
398
421
  for cte in input.parent_ctes:
399
422
  output += flatten_ctes(cte)
@@ -456,18 +479,18 @@ def process_query(
456
479
  hooks: List[BaseHook] | None = None,
457
480
  ) -> ProcessedQuery:
458
481
  hooks = hooks or []
459
- statement.refresh_bindings(environment)
460
482
  graph = generate_graph(environment)
483
+
461
484
  root_datasource = get_query_datasources(
462
485
  environment=environment, graph=graph, statement=statement, hooks=hooks
463
486
  )
464
487
  for hook in hooks:
465
488
  hook.process_root_datasource(root_datasource)
466
489
  # this should always return 1 - TODO, refactor
467
- root_cte = datasource_to_ctes(root_datasource, environment.cte_name_map)[0]
490
+ root_cte = datasource_to_cte(root_datasource, environment.cte_name_map)
468
491
  for hook in hooks:
469
492
  hook.process_root_cte(root_cte)
470
- raw_ctes: List[CTE] = list(reversed(flatten_ctes(root_cte)))
493
+ raw_ctes: List[CTE | UnionCTE] = list(reversed(flatten_ctes(root_cte)))
471
494
  seen = dict()
472
495
  # we can have duplicate CTEs at this point
473
496
  # so merge them together
@@ -479,7 +502,7 @@ def process_query(
479
502
  seen[cte.name] = seen[cte.name] + cte
480
503
  for cte in raw_ctes:
481
504
  cte.parent_ctes = [seen[x.name] for x in cte.parent_ctes]
482
- deduped_ctes: List[CTE] = list(seen.values())
505
+ deduped_ctes: List[CTE | UnionCTE] = list(seen.values())
483
506
  root_cte.order_by = statement.order_by
484
507
  root_cte.limit = statement.limit
485
508
  root_cte.hidden_concepts = [x for x in statement.hidden_components]
@@ -497,4 +520,5 @@ def process_query(
497
520
  # we no longer do any joins at final level, this should always happen in parent CTEs
498
521
  joins=[],
499
522
  hidden_columns=[x for x in statement.hidden_components],
523
+ local_concepts=statement.local_concepts,
500
524
  )
trilogy/dialect/base.py CHANGED
@@ -1,66 +1,67 @@
1
- from typing import List, Union, Optional, Dict, Any, Sequence, Callable
1
+ from typing import Any, Callable, Dict, List, Optional, Sequence, Union
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.processing.utility import (
6
- is_scalar_condition,
7
- decompose_condition,
8
- sort_select_output,
9
- )
10
- from trilogy.constants import CONFIG, logger, MagicConstants
11
- from trilogy.core.internal import DEFAULT_CONCEPTS
5
+ from trilogy.constants import CONFIG, MagicConstants, logger
12
6
  from trilogy.core.enums import (
7
+ DatePart,
13
8
  FunctionType,
9
+ UnnestMode,
14
10
  WindowType,
15
- DatePart,
16
11
  )
12
+ from trilogy.core.internal import DEFAULT_CONCEPTS
17
13
  from trilogy.core.models import (
18
- ListType,
19
- DataType,
20
- Concept,
21
14
  CTE,
22
- ProcessedQuery,
23
- ProcessedQueryPersist,
24
- ProcessedShowStatement,
15
+ AggregateWrapper,
16
+ CaseElse,
17
+ CaseWhen,
18
+ Comparison,
25
19
  CompiledCTE,
20
+ Concept,
21
+ ConceptDeclarationStatement,
26
22
  Conditional,
27
- Comparison,
28
- SubselectComparison,
29
- OrderItem,
30
- WindowItem,
23
+ CopyStatement,
24
+ Datasource,
25
+ DataType,
26
+ Environment,
31
27
  FilterItem,
32
28
  Function,
33
- AggregateWrapper,
34
- Parenthetical,
35
- CaseWhen,
36
- CaseElse,
37
- SelectStatement,
38
- PersistStatement,
39
- Environment,
40
- RawColumnExpr,
29
+ ImportStatement,
30
+ ListType,
41
31
  ListWrapper,
42
- TupleWrapper,
32
+ MapType,
43
33
  MapWrapper,
44
- ShowStatement,
45
- RowsetItem,
34
+ MergeStatementV2,
46
35
  MultiSelectStatement,
47
- RowsetDerivationStatement,
48
- ConceptDeclarationStatement,
49
- ImportStatement,
50
- RawSQLStatement,
51
- ProcessedRawSQLStatement,
52
36
  NumericType,
53
- MapType,
54
- StructType,
55
- MergeStatementV2,
56
- Datasource,
57
- CopyStatement,
37
+ OrderItem,
38
+ Parenthetical,
39
+ PersistStatement,
58
40
  ProcessedCopyStatement,
41
+ ProcessedQuery,
42
+ ProcessedQueryPersist,
43
+ ProcessedRawSQLStatement,
44
+ ProcessedShowStatement,
45
+ RawColumnExpr,
46
+ RawSQLStatement,
47
+ RowsetDerivationStatement,
48
+ RowsetItem,
49
+ SelectStatement,
50
+ ShowStatement,
51
+ StructType,
52
+ SubselectComparison,
53
+ TupleWrapper,
54
+ UnionCTE,
55
+ WindowItem,
59
56
  )
60
- from trilogy.core.query_processor import process_query, process_persist, process_copy
57
+ from trilogy.core.processing.utility import (
58
+ decompose_condition,
59
+ is_scalar_condition,
60
+ sort_select_output,
61
+ )
62
+ from trilogy.core.query_processor import process_copy, process_persist, process_query
61
63
  from trilogy.dialect.common import render_join, render_unnest
62
64
  from trilogy.hooks.base_hook import BaseHook
63
- from trilogy.core.enums import UnnestMode
64
65
 
65
66
  LOGGER_PREFIX = "[RENDERING]"
66
67
 
@@ -130,7 +131,7 @@ FUNCTION_MAP = {
130
131
  FunctionType.SPLIT: lambda x: f"split({x[0]}, {x[1]})",
131
132
  FunctionType.IS_NULL: lambda x: f"isnull({x[0]})",
132
133
  FunctionType.BOOL: lambda x: f"CASE WHEN {x[0]} THEN TRUE ELSE FALSE END",
133
- # complex
134
+ # Complex
134
135
  FunctionType.INDEX_ACCESS: lambda x: f"{x[0]}[{x[1]}]",
135
136
  FunctionType.MAP_ACCESS: lambda x: f"{x[0]}[{x[1]}][1]",
136
137
  FunctionType.UNNEST: lambda x: f"unnest({x[0]})",
@@ -230,7 +231,7 @@ def safe_quote(string: str, quote_char: str):
230
231
  return ".".join([f"{quote_char}{string}{quote_char}" for string in components])
231
232
 
232
233
 
233
- def safe_get_cte_value(coalesce, cte: CTE, c: Concept, quote_char: str):
234
+ def safe_get_cte_value(coalesce, cte: CTE | UnionCTE, c: Concept, quote_char: str):
234
235
  address = c.address
235
236
  raw = cte.source_map.get(address, None)
236
237
 
@@ -255,15 +256,26 @@ class BaseDialect:
255
256
  UNNEST_MODE = UnnestMode.CROSS_APPLY
256
257
 
257
258
  def render_order_item(
258
- self, order_item: OrderItem, cte: CTE, final: bool = False
259
+ self,
260
+ order_item: OrderItem,
261
+ cte: CTE | UnionCTE,
262
+ final: bool = False,
263
+ alias: bool = True,
259
264
  ) -> str:
260
265
  if final:
266
+ if not alias:
267
+ return f"{self.QUOTE_CHARACTER}{order_item.expr.safe_address}{self.QUOTE_CHARACTER} {order_item.order.value}"
268
+
261
269
  return f"{cte.name}.{self.QUOTE_CHARACTER}{order_item.expr.safe_address}{self.QUOTE_CHARACTER} {order_item.order.value}"
262
270
 
263
271
  return f"{self.render_concept_sql(order_item.expr, cte=cte, alias=False)} {order_item.order.value}"
264
272
 
265
273
  def render_concept_sql(
266
- self, c: Concept, cte: CTE, alias: bool = True, raise_invalid: bool = False
274
+ self,
275
+ c: Concept,
276
+ cte: CTE | UnionCTE,
277
+ alias: bool = True,
278
+ raise_invalid: bool = False,
267
279
  ) -> str:
268
280
  result = None
269
281
  if c.pseudonyms:
@@ -290,7 +302,7 @@ class BaseDialect:
290
302
  return result
291
303
 
292
304
  def _render_concept_sql(
293
- self, c: Concept, cte: CTE, raise_invalid: bool = False
305
+ self, c: Concept, cte: CTE | UnionCTE, raise_invalid: bool = False
294
306
  ) -> str:
295
307
  # only recurse while it's in sources of the current cte
296
308
  logger.debug(
@@ -348,6 +360,20 @@ class BaseDialect:
348
360
  " target grain"
349
361
  )
350
362
  rval = f"{self.FUNCTION_GRAIN_MATCH_MAP[c.lineage.function.operator](args)}"
363
+ elif (
364
+ isinstance(c.lineage, Function)
365
+ and c.lineage.operator == FunctionType.UNION
366
+ ):
367
+ local_matched = [
368
+ x
369
+ for x in c.lineage.arguments
370
+ if isinstance(x, Concept) and x.address in cte.output_columns
371
+ ]
372
+ if not local_matched:
373
+ raise SyntaxError(
374
+ "Could not find appropriate source element for union"
375
+ )
376
+ rval = self.render_expr(local_matched[0], cte)
351
377
  elif (
352
378
  isinstance(c.lineage, Function)
353
379
  and c.lineage.operator == FunctionType.CONSTANT
@@ -447,13 +473,11 @@ class BaseDialect:
447
473
  FilterItem,
448
474
  # FilterItem
449
475
  ],
450
- cte: Optional[CTE] = None,
451
- cte_map: Optional[Dict[str, CTE]] = None,
476
+ cte: Optional[CTE | UnionCTE] = None,
477
+ cte_map: Optional[Dict[str, CTE | UnionCTE]] = None,
452
478
  raise_invalid: bool = False,
453
479
  ) -> str:
454
-
455
480
  if isinstance(e, SubselectComparison):
456
-
457
481
  if isinstance(e.right, Concept):
458
482
  # we won't always have an existnce map
459
483
  # so fall back to the normal map
@@ -559,6 +583,13 @@ class BaseDialect:
559
583
  elif isinstance(e, FilterItem):
560
584
  return f"CASE WHEN {self.render_expr(e.where.conditional,cte=cte, cte_map=cte_map, raise_invalid=raise_invalid)} THEN {self.render_expr(e.content, cte, cte_map=cte_map, raise_invalid=raise_invalid)} ELSE NULL END"
561
585
  elif isinstance(e, Concept):
586
+ if (
587
+ isinstance(e.lineage, Function)
588
+ and e.lineage.operator == FunctionType.CONSTANT
589
+ and CONFIG.rendering.parameters is True
590
+ and e.datatype.data_type != DataType.MAP
591
+ ):
592
+ return f":{e.safe_address}"
562
593
  if cte:
563
594
  return self.render_concept_sql(
564
595
  e, cte, alias=False, raise_invalid=raise_invalid
@@ -592,7 +623,18 @@ class BaseDialect:
592
623
  else:
593
624
  raise ValueError(f"Unable to render type {type(e)} {e}")
594
625
 
595
- def render_cte(self, cte: CTE, auto_sort: bool = True) -> CompiledCTE:
626
+ def render_cte(self, cte: CTE | UnionCTE, auto_sort: bool = True) -> CompiledCTE:
627
+ if isinstance(cte, UnionCTE):
628
+ base_statement = f"\n{cte.operator}\n".join(
629
+ [self.render_cte(child).statement for child in cte.internal_ctes]
630
+ )
631
+ if cte.order_by:
632
+ ordering = [
633
+ self.render_order_item(i, cte, final=True, alias=False)
634
+ for i in cte.order_by.items
635
+ ]
636
+ base_statement += "\nORDER BY " + ",".join(ordering)
637
+ return CompiledCTE(name=cte.name, statement=base_statement)
596
638
  if self.UNNEST_MODE in (
597
639
  UnnestMode.CROSS_APPLY,
598
640
  UnnestMode.CROSS_JOIN,
@@ -863,7 +905,7 @@ class BaseDialect:
863
905
  if CONFIG.strict_mode and INVALID_REFERENCE_STRING(1) in final:
864
906
  raise ValueError(
865
907
  f"Invalid reference string found in query: {final}, this should never"
866
- " occur. Please report this issue."
908
+ " occur. Please create a GitHub issue to report this."
867
909
  )
868
910
  logger.info(f"{LOGGER_PREFIX} Compiled query: {final}")
869
911
  return final
@@ -1,11 +1,10 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.enums import FunctionType, WindowType, UnnestMode
5
+ from trilogy.core.enums import FunctionType, UnnestMode, WindowType
6
6
  from trilogy.dialect.base import BaseDialect
7
7
 
8
-
9
8
  WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
10
9
 
11
10
  FUNCTION_MAP = {
trilogy/dialect/common.py CHANGED
@@ -1,13 +1,14 @@
1
+ from typing import Callable
2
+
3
+ from trilogy.core.enums import Modifier, UnnestMode
1
4
  from trilogy.core.models import (
2
- Join,
3
- InstantiatedUnnestJoin,
4
5
  CTE,
5
6
  Concept,
6
7
  Function,
8
+ InstantiatedUnnestJoin,
9
+ Join,
7
10
  RawColumnExpr,
8
11
  )
9
- from trilogy.core.enums import UnnestMode, Modifier
10
- from typing import Callable
11
12
 
12
13
 
13
14
  def null_wrapper(lval: str, rval: str, modifiers: list[Modifier]) -> str:
trilogy/dialect/config.py CHANGED
@@ -1,5 +1,4 @@
1
1
  class DialectConfig:
2
-
3
2
  def __init__(self):
4
3
  pass
5
4
 
@@ -101,7 +100,6 @@ class PrestoConfig(DialectConfig):
101
100
 
102
101
 
103
102
  class TrinoConfig(PrestoConfig):
104
-
105
103
  def connection_string(self) -> str:
106
104
  if self.schema:
107
105
  return f"trino://{self.username}:{self.password}@{self.host}:{self.port}/{self.catalog}/{self.schema}"
trilogy/dialect/duckdb.py CHANGED
@@ -1,8 +1,8 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.enums import FunctionType, WindowType, UnnestMode
5
+ from trilogy.core.enums import FunctionType, UnnestMode, WindowType
6
6
  from trilogy.dialect.base import BaseDialect
7
7
 
8
8
  WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
trilogy/dialect/enums.py CHANGED
@@ -1,12 +1,12 @@
1
1
  from enum import Enum
2
- from typing import List, TYPE_CHECKING, Optional, Callable
2
+ from typing import TYPE_CHECKING, Callable, List, Optional
3
3
 
4
4
  if TYPE_CHECKING:
5
+ from trilogy import Environment, Executor
5
6
  from trilogy.hooks.base_hook import BaseHook
6
- from trilogy import Executor, Environment
7
7
 
8
- from trilogy.dialect.config import DialectConfig
9
8
  from trilogy.constants import logger
9
+ from trilogy.dialect.config import DialectConfig
10
10
 
11
11
 
12
12
  def default_factory(conf: DialectConfig, config_type):
@@ -42,6 +42,7 @@ class Dialects(Enum):
42
42
  if self == Dialects.BIGQUERY:
43
43
  from google.auth import default
44
44
  from google.cloud import bigquery
45
+
45
46
  from trilogy.dialect.config import BigQueryConfig
46
47
 
47
48
  credentials, project = default()
@@ -52,7 +53,6 @@ class Dialects(Enum):
52
53
  BigQueryConfig,
53
54
  )
54
55
  elif self == Dialects.SQL_SERVER:
55
-
56
56
  raise NotImplementedError()
57
57
  elif self == Dialects.DUCK_DB:
58
58
  from trilogy.dialect.config import DuckDBConfig
@@ -98,7 +98,7 @@ class Dialects(Enum):
98
98
  conf: DialectConfig | None = None,
99
99
  _engine_factory: Callable | None = None,
100
100
  ) -> "Executor":
101
- from trilogy import Executor, Environment
101
+ from trilogy import Environment, Executor
102
102
 
103
103
  if _engine_factory is not None:
104
104
  return Executor(
@@ -1,8 +1,8 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.enums import FunctionType, WindowType, DatePart
5
+ from trilogy.core.enums import DatePart, FunctionType, WindowType
6
6
  from trilogy.dialect.base import BaseDialect
7
7
 
8
8
 
trilogy/dialect/presto.py CHANGED
@@ -1,11 +1,10 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.enums import FunctionType, WindowType
6
- from trilogy.dialect.base import BaseDialect
5
+ from trilogy.core.enums import FunctionType, UnnestMode, WindowType
7
6
  from trilogy.core.models import DataType
8
- from trilogy.core.enums import UnnestMode
7
+ from trilogy.dialect.base import BaseDialect
9
8
 
10
9
  WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
11
10
 
@@ -1,8 +1,8 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
4
 
5
- from trilogy.core.enums import FunctionType, WindowType, UnnestMode
5
+ from trilogy.core.enums import FunctionType, UnnestMode, WindowType
6
6
  from trilogy.dialect.base import BaseDialect
7
7
 
8
8
  ENV_SNOWFLAKE_PW = "PREQL_SNOWFLAKE_PW"
@@ -1,17 +1,16 @@
1
- from typing import Mapping, Callable, Any
1
+ from typing import Any, Callable, Mapping
2
2
 
3
3
  from jinja2 import Template
4
- from trilogy.utility import string_to_hash
5
-
6
4
 
7
5
  from trilogy.core.enums import FunctionType, WindowType
8
6
  from trilogy.core.models import (
9
7
  ProcessedQuery,
10
8
  ProcessedQueryPersist,
11
- ProcessedShowStatement,
12
9
  ProcessedRawSQLStatement,
10
+ ProcessedShowStatement,
13
11
  )
14
12
  from trilogy.dialect.base import BaseDialect
13
+ from trilogy.utility import string_to_hash
15
14
 
16
15
  WINDOW_FUNCTION_MAP: Mapping[WindowType, Callable[[Any, Any, Any], str]] = {}
17
16
 
trilogy/engine.py CHANGED
@@ -1,6 +1,7 @@
1
- from sqlalchemy.engine import Engine, Connection, CursorResult
2
1
  from typing import Protocol
3
2
 
3
+ from sqlalchemy.engine import Connection, CursorResult, Engine
4
+
4
5
 
5
6
  class EngineResult(Protocol):
6
7
  pass