pytrilogy 0.0.2.47__py3-none-any.whl → 0.0.2.48__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (69)
  1. {pytrilogy-0.0.2.47.dist-info → pytrilogy-0.0.2.48.dist-info}/METADATA +1 -1
  2. pytrilogy-0.0.2.48.dist-info/RECORD +85 -0
  3. trilogy/__init__.py +2 -2
  4. trilogy/constants.py +4 -2
  5. trilogy/core/enums.py +7 -1
  6. trilogy/core/env_processor.py +1 -2
  7. trilogy/core/environment_helpers.py +5 -5
  8. trilogy/core/functions.py +11 -10
  9. trilogy/core/internal.py +2 -3
  10. trilogy/core/models.py +444 -392
  11. trilogy/core/optimization.py +37 -21
  12. trilogy/core/optimizations/__init__.py +1 -1
  13. trilogy/core/optimizations/base_optimization.py +6 -6
  14. trilogy/core/optimizations/inline_constant.py +7 -4
  15. trilogy/core/optimizations/inline_datasource.py +14 -5
  16. trilogy/core/optimizations/predicate_pushdown.py +20 -10
  17. trilogy/core/processing/concept_strategies_v3.py +40 -24
  18. trilogy/core/processing/graph_utils.py +2 -3
  19. trilogy/core/processing/node_generators/__init__.py +7 -5
  20. trilogy/core/processing/node_generators/basic_node.py +4 -4
  21. trilogy/core/processing/node_generators/common.py +10 -11
  22. trilogy/core/processing/node_generators/filter_node.py +7 -9
  23. trilogy/core/processing/node_generators/group_node.py +10 -11
  24. trilogy/core/processing/node_generators/group_to_node.py +5 -5
  25. trilogy/core/processing/node_generators/multiselect_node.py +10 -12
  26. trilogy/core/processing/node_generators/node_merge_node.py +7 -9
  27. trilogy/core/processing/node_generators/rowset_node.py +9 -8
  28. trilogy/core/processing/node_generators/select_merge_node.py +11 -10
  29. trilogy/core/processing/node_generators/select_node.py +5 -5
  30. trilogy/core/processing/node_generators/union_node.py +75 -0
  31. trilogy/core/processing/node_generators/unnest_node.py +2 -3
  32. trilogy/core/processing/node_generators/window_node.py +3 -4
  33. trilogy/core/processing/nodes/__init__.py +9 -5
  34. trilogy/core/processing/nodes/base_node.py +17 -13
  35. trilogy/core/processing/nodes/filter_node.py +3 -4
  36. trilogy/core/processing/nodes/group_node.py +8 -10
  37. trilogy/core/processing/nodes/merge_node.py +11 -11
  38. trilogy/core/processing/nodes/select_node_v2.py +8 -9
  39. trilogy/core/processing/nodes/union_node.py +50 -0
  40. trilogy/core/processing/nodes/unnest_node.py +2 -3
  41. trilogy/core/processing/nodes/window_node.py +2 -3
  42. trilogy/core/processing/utility.py +37 -40
  43. trilogy/core/query_processor.py +68 -44
  44. trilogy/dialect/base.py +95 -53
  45. trilogy/dialect/bigquery.py +2 -3
  46. trilogy/dialect/common.py +5 -4
  47. trilogy/dialect/config.py +0 -2
  48. trilogy/dialect/duckdb.py +2 -2
  49. trilogy/dialect/enums.py +5 -5
  50. trilogy/dialect/postgres.py +2 -2
  51. trilogy/dialect/presto.py +3 -4
  52. trilogy/dialect/snowflake.py +2 -2
  53. trilogy/dialect/sql_server.py +3 -4
  54. trilogy/engine.py +2 -1
  55. trilogy/executor.py +43 -30
  56. trilogy/hooks/base_hook.py +5 -4
  57. trilogy/hooks/graph_hook.py +2 -1
  58. trilogy/hooks/query_debugger.py +18 -8
  59. trilogy/parsing/common.py +15 -20
  60. trilogy/parsing/parse_engine.py +124 -88
  61. trilogy/parsing/render.py +32 -35
  62. trilogy/parsing/trilogy.lark +8 -1
  63. trilogy/scripts/trilogy.py +6 -4
  64. trilogy/utility.py +1 -1
  65. pytrilogy-0.0.2.47.dist-info/RECORD +0 -83
  66. {pytrilogy-0.0.2.47.dist-info → pytrilogy-0.0.2.48.dist-info}/LICENSE.md +0 -0
  67. {pytrilogy-0.0.2.47.dist-info → pytrilogy-0.0.2.48.dist-info}/WHEEL +0 -0
  68. {pytrilogy-0.0.2.47.dist-info → pytrilogy-0.0.2.48.dist-info}/entry_points.txt +0 -0
  69. {pytrilogy-0.0.2.47.dist-info → pytrilogy-0.0.2.48.dist-info}/top_level.txt +0 -0
trilogy/core/optimization.py

@@ -1,17 +1,18 @@
+ from trilogy.constants import CONFIG, logger
+ from trilogy.core.enums import BooleanOperator, PurposeLineage
  from trilogy.core.models import (
      CTE,
-     SelectStatement,
-     MultiSelectStatement,
      Conditional,
+     MultiSelectStatement,
+     SelectStatement,
+     UnionCTE,
  )
- from trilogy.core.enums import PurposeLineage, BooleanOperator
- from trilogy.constants import logger, CONFIG
  from trilogy.core.optimizations import (
-     OptimizationRule,
      InlineConstant,
+     InlineDatasource,
+     OptimizationRule,
      PredicatePushdown,
      PredicatePushdownRemove,
-     InlineDatasource,
  )
  from trilogy.core.processing.utility import sort_select_output

@@ -61,12 +62,12 @@ def reorder_ctes(


  def filter_irrelevant_ctes(
-     input: list[CTE],
-     root_cte: CTE,
+     input: list[CTE | UnionCTE],
+     root_cte: CTE | UnionCTE,
  ):
      relevant_ctes = set()

-     def recurse(cte: CTE, inverse_map: dict[str, list[CTE]]):
+     def recurse(cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]):
          # TODO: revisit this
          # if parent := is_locally_irrelevant(cte):
          #     logger.info(

@@ -88,6 +89,9 @@ def filter_irrelevant_ctes(
          relevant_ctes.add(cte.name)
          for cte in cte.parent_ctes:
              recurse(cte, inverse_map)
+         if isinstance(cte, UnionCTE):
+             for cte in cte.internal_ctes:
+                 recurse(cte, inverse_map)

      inverse_map = gen_inverse_map(input)
      recurse(root_cte, inverse_map)

@@ -97,22 +101,31 @@ def filter_irrelevant_ctes(
      return filter_irrelevant_ctes(final, root_cte)


- def gen_inverse_map(input: list[CTE]) -> dict[str, list[CTE]]:
-     inverse_map: dict[str, list[CTE]] = {}
+ def gen_inverse_map(input: list[CTE | UnionCTE]) -> dict[str, list[CTE | UnionCTE]]:
+     inverse_map: dict[str, list[CTE | UnionCTE]] = {}
      for cte in input:
-         for parent in cte.parent_ctes:
-             if parent.name not in inverse_map:
-                 inverse_map[parent.name] = []
-             inverse_map[parent.name].append(cte)
+         if isinstance(cte, UnionCTE):
+             for internal in cte.internal_ctes:
+                 if internal.name not in inverse_map:
+                     inverse_map[internal.name] = []
+                 inverse_map[internal.name].append(cte)
+         else:
+             for parent in cte.parent_ctes:
+                 if parent.name not in inverse_map:
+                     inverse_map[parent.name] = []
+                 inverse_map[parent.name].append(cte)
+
      return inverse_map


- def is_direct_return_eligible(cte: CTE) -> CTE | None:
+ def is_direct_return_eligible(cte: CTE | UnionCTE) -> CTE | UnionCTE | None:
      # if isinstance(select, (PersistStatement, MultiSelectStatement)):
      #     return False
      if len(cte.parent_ctes) != 1:
          return None
      direct_parent = cte.parent_ctes[0]
+     if isinstance(direct_parent, UnionCTE):
+         return None

      output_addresses = set([x.address for x in cte.output_columns])
      parent_output_addresses = set([x.address for x in direct_parent.output_columns])

@@ -120,6 +133,8 @@ def is_direct_return_eligible(cte: CTE) -> CTE | None:
          return None
      if not direct_parent.grain == cte.grain:
          return None
+
+     assert isinstance(cte, CTE)
      derived_concepts = [
          c
          for c in cte.source.output_concepts + cte.source.hidden_concepts

@@ -155,10 +170,11 @@ def is_direct_return_eligible(cte: CTE) -> CTE | None:


  def optimize_ctes(
-     input: list[CTE], root_cte: CTE, select: SelectStatement | MultiSelectStatement
- ) -> list[CTE]:
-
-     direct_parent: CTE | None = root_cte
+     input: list[CTE | UnionCTE],
+     root_cte: CTE | UnionCTE,
+     select: SelectStatement | MultiSelectStatement,
+ ) -> list[CTE | UnionCTE]:
+     direct_parent: CTE | UnionCTE | None = root_cte
      while CONFIG.optimizations.direct_return and (
          direct_parent := is_direct_return_eligible(root_cte)
      ):

@@ -178,7 +194,7 @@ def optimize_ctes(
          direct_parent.condition = root_cte.condition
          root_cte = direct_parent

-     sort_select_output(root_cte, select)
+     sort_select_output(root_cte, select)

      REGISTERED_RULES: list["OptimizationRule"] = []
      if CONFIG.optimizations.constant_inlining:
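
For reference, a minimal sketch of the reachability walk the filter_irrelevant_ctes hunks above implement: parents are followed as before, and a UnionCTE additionally exposes its internal_ctes as reachable members. The stand-in classes below are hypothetical; only the attribute names (name, parent_ctes, internal_ctes) come from the diff.

from dataclasses import dataclass, field


@dataclass
class FakeCTE:  # hypothetical stand-in for trilogy.core.models.CTE
    name: str
    parent_ctes: list = field(default_factory=list)


@dataclass
class FakeUnionCTE(FakeCTE):  # hypothetical stand-in for the new UnionCTE
    internal_ctes: list = field(default_factory=list)


def reachable_cte_names(root: FakeCTE) -> set[str]:
    # mirrors recurse() above: walk parents, and for union CTEs also walk internal members
    relevant: set[str] = set()

    def recurse(cte: FakeCTE) -> None:
        relevant.add(cte.name)
        for parent in cte.parent_ctes:
            recurse(parent)
        if isinstance(cte, FakeUnionCTE):
            for internal in cte.internal_ctes:
                recurse(internal)

    recurse(root)
    return relevant


base_a, base_b = FakeCTE("base_a"), FakeCTE("base_b")
union = FakeUnionCTE("union_ab", internal_ctes=[base_a, base_b])
assert reachable_cte_names(FakeCTE("final", parent_ctes=[union])) == {
    "final",
    "union_ab",
    "base_a",
    "base_b",
}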
trilogy/core/optimizations/__init__.py

@@ -1,7 +1,7 @@
+ from .base_optimization import OptimizationRule
  from .inline_constant import InlineConstant
  from .inline_datasource import InlineDatasource
  from .predicate_pushdown import PredicatePushdown, PredicatePushdownRemove
- from .base_optimization import OptimizationRule

  __all__ = [
      "OptimizationRule",
trilogy/core/optimizations/base_optimization.py

@@ -1,13 +1,13 @@
- from trilogy.core.models import (
-     CTE,
- )
- from trilogy.constants import logger
  from abc import ABC

+ from trilogy.constants import logger
+ from trilogy.core.models import CTE, UnionCTE
+

  class OptimizationRule(ABC):
-
-     def optimize(self, cte: CTE, inverse_map: dict[str, list[CTE]]) -> bool:
+     def optimize(
+         self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
+     ) -> bool:
          raise NotImplementedError

      def log(self, message: str):
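
With optimize() now typed over CTE | UnionCTE, a rule built on this base class has to decide what to do when it is handed a union. A minimal sketch under the names shown in these hunks (the rule itself is hypothetical and performs no rewrite; the two dispatch options mirror what the shipped rules do):

from trilogy.core.models import CTE, UnionCTE
from trilogy.core.optimizations.base_optimization import OptimizationRule


class NoOpRule(OptimizationRule):
    # hypothetical rule for illustration only: shows the union dispatch pattern
    def optimize(
        self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
    ) -> bool:
        if isinstance(cte, UnionCTE):
            # PredicatePushdown simply returns False here; InlineConstant and
            # InlineDatasource instead recurse into the union's members:
            return any(self.optimize(x, inverse_map) for x in cte.internal_ctes)
        self.log(f"visiting {cte.name}")
        return False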
trilogy/core/optimizations/inline_constant.py

@@ -1,15 +1,18 @@
+ from trilogy.core.enums import PurposeLineage
  from trilogy.core.models import (
      CTE,
      Concept,
+     UnionCTE,
  )
- from trilogy.core.enums import PurposeLineage
-
  from trilogy.core.optimizations.base_optimization import OptimizationRule


  class InlineConstant(OptimizationRule):
-
-     def optimize(self, cte: CTE, inverse_map: dict[str, list[CTE]]) -> bool:
+     def optimize(
+         self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
+     ) -> bool:
+         if isinstance(cte, UnionCTE):
+             return any(self.optimize(x, inverse_map) for x in cte.internal_ctes)

          to_inline: list[Concept] = []
          for x in cte.source.input_concepts:
trilogy/core/optimizations/inline_datasource.py

@@ -1,21 +1,28 @@
+ from collections import defaultdict
+
+ from trilogy.constants import CONFIG
  from trilogy.core.models import (
      CTE,
      Datasource,
+     UnionCTE,
  )
-
  from trilogy.core.optimizations.base_optimization import OptimizationRule
- from collections import defaultdict
- from trilogy.constants import CONFIG


  class InlineDatasource(OptimizationRule):
-
      def __init__(self):
          super().__init__()
          self.candidates = defaultdict(lambda: set())
          self.count = defaultdict(lambda: 0)

-     def optimize(self, cte: CTE, inverse_map: dict[str, list[CTE]]) -> bool:
+     def optimize(
+         self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
+     ) -> bool:
+         if isinstance(cte, UnionCTE):
+             return any(
+                 self.optimize(x, inverse_map=inverse_map) for x in cte.internal_ctes
+             )
+
          if not cte.parent_ctes:
              return False

@@ -25,6 +32,8 @@ class InlineDatasource(OptimizationRule):
          to_inline: list[CTE] = []
          force_group = False
          for parent_cte in cte.parent_ctes:
+             if isinstance(parent_cte, UnionCTE):
+                 continue
              if not parent_cte.is_root_datasource:
                  self.debug(f"parent {parent_cte.name} is not root")
                  continue
@@ -1,11 +1,12 @@
1
1
  from trilogy.core.models import (
2
2
  CTE,
3
- Conditional,
4
3
  BooleanOperator,
5
- Datasource,
6
- ConceptArgs,
7
4
  Comparison,
5
+ ConceptArgs,
6
+ Conditional,
7
+ Datasource,
8
8
  Parenthetical,
9
+ UnionCTE,
9
10
  WindowItem,
10
11
  )
11
12
  from trilogy.core.optimizations.base_optimization import OptimizationRule
@@ -25,20 +26,21 @@ def is_child_of(a, comparison):
25
26
 
26
27
 
27
28
  class PredicatePushdown(OptimizationRule):
28
-
29
29
  def __init__(self, *args, **kwargs) -> None:
30
30
  super().__init__(*args, **kwargs)
31
31
  self.complete: dict[str, bool] = {}
32
32
 
33
33
  def _check_parent(
34
34
  self,
35
- cte: CTE,
36
- parent_cte: CTE,
35
+ cte: CTE | UnionCTE,
36
+ parent_cte: CTE | UnionCTE,
37
37
  candidate: Conditional | Comparison | Parenthetical | None,
38
- inverse_map: dict[str, list[CTE]],
38
+ inverse_map: dict[str, list[CTE | UnionCTE]],
39
39
  ):
40
40
  if not isinstance(candidate, ConceptArgs):
41
41
  return False
42
+ if not isinstance(parent_cte, CTE):
43
+ return False
42
44
  row_conditions = {x.address for x in candidate.row_arguments}
43
45
  existence_conditions = {
44
46
  y.address for x in candidate.existence_arguments for y in x
@@ -112,7 +114,12 @@ class PredicatePushdown(OptimizationRule):
112
114
  )
113
115
  return False
114
116
 
115
- def optimize(self, cte: CTE, inverse_map: dict[str, list[CTE]]) -> bool:
117
+ def optimize(
118
+ self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
119
+ ) -> bool:
120
+ # TODO - pushdown through unions
121
+ if isinstance(cte, UnionCTE):
122
+ return False
116
123
  optimized = False
117
124
 
118
125
  if not cte.parent_ctes:
@@ -167,12 +174,15 @@ class PredicatePushdown(OptimizationRule):
167
174
 
168
175
 
169
176
  class PredicatePushdownRemove(OptimizationRule):
170
-
171
177
  def __init__(self, *args, **kwargs) -> None:
172
178
  super().__init__(*args, **kwargs)
173
179
  self.complete: dict[str, bool] = {}
174
180
 
175
- def optimize(self, cte: CTE, inverse_map: dict[str, list[CTE]]) -> bool:
181
+ def optimize(
182
+ self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
183
+ ) -> bool:
184
+ if isinstance(cte, UnionCTE):
185
+ return False
176
186
  optimized = False
177
187
 
178
188
  if not cte.parent_ctes:
trilogy/core/processing/concept_strategies_v3.py

@@ -1,41 +1,42 @@
  from collections import defaultdict
+ from enum import Enum
  from typing import List, Optional, Protocol, Union

  from trilogy.constants import logger
- from trilogy.core.enums import PurposeLineage, Granularity, FunctionType
+ from trilogy.core.enums import FunctionType, Granularity, PurposeLineage
  from trilogy.core.env_processor import generate_graph
  from trilogy.core.graph_models import ReferenceGraph
  from trilogy.core.models import (
      Concept,
      Environment,
      Function,
-     WhereClause,
      RowsetItem,
+     UndefinedConcept,
+     WhereClause,
  )
- from trilogy.core.processing.utility import (
-     get_disconnected_components,
+ from trilogy.core.processing.node_generators import (
+     gen_basic_node,
+     gen_filter_node,
+     gen_group_node,
+     gen_group_to_node,
+     gen_merge_node,
+     gen_multiselect_node,
+     gen_rowset_node,
+     gen_union_node,
+     gen_unnest_node,
+     gen_window_node,
  )
- from trilogy.utility import unique
  from trilogy.core.processing.nodes import (
      ConstantNode,
-     MergeNode,
      GroupNode,
-     StrategyNode,
      History,
+     MergeNode,
+     StrategyNode,
  )
- from trilogy.core.processing.node_generators import (
-     gen_filter_node,
-     gen_window_node,
-     gen_group_node,
-     gen_basic_node,
-     gen_unnest_node,
-     gen_merge_node,
-     gen_group_to_node,
-     gen_rowset_node,
-     gen_multiselect_node,
+ from trilogy.core.processing.utility import (
+     get_disconnected_components,
  )
-
- from enum import Enum
+ from trilogy.utility import unique


  class ValidationResult(Enum):

@@ -107,6 +108,9 @@ def get_priority_concept(
          +
          # then rowsets to remove them from scope, as they cannot get partials
          [c for c in remaining_concept if c.derivation == PurposeLineage.ROWSET]
+         +
+         # then rowsets to remove them from scope, as they cannot get partials
+         [c for c in remaining_concept if c.derivation == PurposeLineage.UNION]
          # we should be home-free here
          +
          # then aggregates to remove them from scope, as they cannot get partials

@@ -250,6 +254,20 @@ def generate_node(
              history,
              conditions=conditions,
          )
+     elif concept.derivation == PurposeLineage.UNION:
+         logger.info(
+             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} for {concept.address}, generating union node with optional {[x.address for x in local_optional]} and condition {conditions}"
+         )
+         return gen_union_node(
+             concept,
+             local_optional,
+             environment,
+             g,
+             depth + 1,
+             source_concepts,
+             history,
+             conditions=conditions,
+         )
      elif concept.derivation == PurposeLineage.AGGREGATE:
          # don't push constants up before aggregation
          # if not required

@@ -495,11 +513,9 @@ def validate_concept(
      seen: set[str],
      environment: Environment,
  ):
-
      found_map[str(node)].add(concept)
      seen.add(concept.address)
      if concept not in node.partial_concepts:
-
          found_addresses.add(concept.address)
          non_partial_addresses.add(concept.address)
          # remove it from our partial tracking

@@ -658,7 +674,6 @@ def search_concepts(
      history: History | None = None,
      conditions: WhereClause | None = None,
  ) -> StrategyNode | None:
-
      history = history or History()
      hist = history.get_history(
          search=mandatory_list, accept_partial=accept_partial, conditions=conditions

@@ -698,10 +713,11 @@ def _search_concepts(
      accept_partial: bool = False,
      conditions: WhereClause | None = None,
  ) -> StrategyNode | None:
-
      # these are the concepts we need in the output projection
      mandatory_list = unique(mandatory_list, "address")
-
+     for x in mandatory_list:
+         if isinstance(x, UndefinedConcept):
+             raise SyntaxError(f"Undefined concept {x.address}")
      all_mandatory = set(c.address for c in mandatory_list)

      must_evaluate_condition_on_this_level_not_push_down = False
@@ -1,18 +1,17 @@
1
+ from collections import defaultdict
1
2
  from typing import Dict, List
3
+
2
4
  from trilogy.core.models import Concept
3
- from collections import defaultdict
4
5
  from trilogy.utility import unique
5
6
 
6
7
 
7
8
  def extract_required_subgraphs(
8
9
  assocs: defaultdict[str, list], path: List[str]
9
10
  ) -> defaultdict[str, list]:
10
-
11
11
  ds = path[0]
12
12
  current: list[str] = []
13
13
  for idx, val in enumerate(path):
14
14
  if val.startswith("ds~"):
15
-
16
15
  if current:
17
16
  assocs[ds] += current
18
17
  current = [path[idx - 1]] if idx > 0 else []
trilogy/core/processing/node_generators/__init__.py

@@ -1,13 +1,14 @@
+ from .basic_node import gen_basic_node
  from .filter_node import gen_filter_node
- from .window_node import gen_window_node
  from .group_node import gen_group_node
  from .group_to_node import gen_group_to_node
- from .basic_node import gen_basic_node
- from .select_node import gen_select_node
- from .unnest_node import gen_unnest_node
+ from .multiselect_node import gen_multiselect_node
  from .node_merge_node import gen_merge_node
  from .rowset_node import gen_rowset_node
- from .multiselect_node import gen_multiselect_node
+ from .select_node import gen_select_node
+ from .union_node import gen_union_node
+ from .unnest_node import gen_unnest_node
+ from .window_node import gen_window_node

  __all__ = [
      "gen_filter_node",

@@ -16,6 +17,7 @@ __all__ = [
      "gen_select_node",
      "gen_basic_node",
      "gen_unnest_node",
+     "gen_union_node",
      "gen_merge_node",
      "gen_group_to_node",
      "gen_rowset_node",
trilogy/core/processing/node_generators/basic_node.py

@@ -1,18 +1,18 @@
  # directly select out a basic derivation
  from typing import List

+ from trilogy.constants import logger
+ from trilogy.core.enums import SourceType
  from trilogy.core.models import (
      Concept,
-     WhereClause,
      Function,
      FunctionClass,
+     WhereClause,
  )
- from trilogy.core.processing.nodes import StrategyNode, History
  from trilogy.core.processing.node_generators.common import (
      resolve_function_parent_concepts,
  )
- from trilogy.constants import logger
- from trilogy.core.enums import SourceType
+ from trilogy.core.processing.nodes import History, StrategyNode

  LOGGER_PREFIX = "[GEN_BASIC_NODE]"

@@ -1,23 +1,23 @@
1
- from typing import List, Tuple, Callable
1
+ from collections import defaultdict
2
+ from typing import Callable, List, Tuple
2
3
 
3
- from trilogy.core.enums import PurposeLineage, Purpose
4
+ from trilogy.core.enums import Purpose, PurposeLineage
4
5
  from trilogy.core.models import (
5
- Concept,
6
- Function,
7
6
  AggregateWrapper,
8
- FilterItem,
7
+ Concept,
9
8
  Environment,
9
+ FilterItem,
10
+ Function,
10
11
  LooseConceptList,
11
12
  WhereClause,
12
13
  )
13
- from trilogy.utility import unique
14
- from trilogy.core.processing.nodes.base_node import StrategyNode
15
- from trilogy.core.processing.nodes.merge_node import MergeNode
16
- from trilogy.core.processing.nodes import History
17
14
  from trilogy.core.processing.nodes import (
15
+ History,
18
16
  NodeJoin,
19
17
  )
20
- from collections import defaultdict
18
+ from trilogy.core.processing.nodes.base_node import StrategyNode
19
+ from trilogy.core.processing.nodes.merge_node import MergeNode
20
+ from trilogy.utility import unique
21
21
 
22
22
 
23
23
  def resolve_function_parent_concepts(concept: Concept) -> List[Concept]:
@@ -151,7 +151,6 @@ def gen_enrichment_node(
151
151
  history: History | None = None,
152
152
  conditions: WhereClause | None = None,
153
153
  ):
154
-
155
154
  local_opts = LooseConceptList(concepts=local_optional)
156
155
 
157
156
  extra_required = [
@@ -1,20 +1,18 @@
1
1
  from typing import List
2
2
 
3
-
3
+ from trilogy.constants import logger
4
4
  from trilogy.core.models import Concept, Environment, FilterItem, Grain, WhereClause
5
+ from trilogy.core.processing.node_generators.common import (
6
+ resolve_filter_parent_concepts,
7
+ )
5
8
  from trilogy.core.processing.nodes import (
6
9
  FilterNode,
7
- MergeNode,
8
10
  History,
9
- StrategyNode,
11
+ MergeNode,
10
12
  SelectNode,
13
+ StrategyNode,
11
14
  )
12
- from trilogy.core.processing.node_generators.common import (
13
- resolve_filter_parent_concepts,
14
- )
15
- from trilogy.constants import logger
16
- from trilogy.core.processing.utility import padding, unique
17
- from trilogy.core.processing.utility import is_scalar_condition
15
+ from trilogy.core.processing.utility import is_scalar_condition, padding, unique
18
16
 
19
17
  LOGGER_PREFIX = "[GEN_FILTER_NODE]"
20
18
 
trilogy/core/processing/node_generators/group_node.py

@@ -1,23 +1,22 @@
+ from typing import List
+
+ from trilogy.constants import logger
  from trilogy.core.models import (
+     AggregateWrapper,
      Concept,
      Environment,
-     LooseConceptList,
-     WhereClause,
      Function,
-     AggregateWrapper,
      Grain,
+     LooseConceptList,
+     WhereClause,
  )
- from trilogy.utility import unique
- from trilogy.core.processing.nodes import GroupNode, StrategyNode, History
- from typing import List
- from trilogy.core.processing.node_generators.common import (
-     resolve_function_parent_concepts,
- )
- from trilogy.constants import logger
- from trilogy.core.processing.utility import padding, create_log_lambda
  from trilogy.core.processing.node_generators.common import (
      gen_enrichment_node,
+     resolve_function_parent_concepts,
  )
+ from trilogy.core.processing.nodes import GroupNode, History, StrategyNode
+ from trilogy.core.processing.utility import create_log_lambda, padding
+ from trilogy.utility import unique

  LOGGER_PREFIX = "[GEN_GROUP_NODE]"

trilogy/core/processing/node_generators/group_to_node.py

@@ -1,13 +1,13 @@
+ from typing import List
+
+ from trilogy.constants import logger
  from trilogy.core.models import Concept, Environment, Function, WhereClause
  from trilogy.core.processing.nodes import (
      GroupNode,
-     StrategyNode,
-     MergeNode,
      History,
+     MergeNode,
+     StrategyNode,
  )
- from typing import List
-
- from trilogy.constants import logger
  from trilogy.core.processing.utility import padding

  LOGGER_PREFIX = "[GEN_GROUP_TO_NODE]"
trilogy/core/processing/node_generators/multiselect_node.py

@@ -1,22 +1,20 @@
+ from collections import defaultdict
+ from itertools import combinations
+ from typing import List
+
+ from trilogy.constants import logger
+ from trilogy.core.enums import BooleanOperator, JoinType, Purpose
  from trilogy.core.models import (
      Concept,
+     Conditional,
      Environment,
      MultiSelectStatement,
      WhereClause,
-     Conditional,
  )
- from trilogy.core.processing.nodes import MergeNode, NodeJoin, History
- from trilogy.core.processing.nodes.base_node import concept_list_to_grain, StrategyNode
- from typing import List
-
- from trilogy.core.enums import JoinType
- from trilogy.constants import logger
- from trilogy.core.processing.utility import padding
- from trilogy.core.processing.utility import concept_to_relevant_joins
- from collections import defaultdict
- from itertools import combinations
- from trilogy.core.enums import Purpose, BooleanOperator
  from trilogy.core.processing.node_generators.common import resolve_join_order
+ from trilogy.core.processing.nodes import History, MergeNode, NodeJoin
+ from trilogy.core.processing.nodes.base_node import StrategyNode, concept_list_to_grain
+ from trilogy.core.processing.utility import concept_to_relevant_joins, padding

  LOGGER_PREFIX = "[GEN_MULTISELECT_NODE]"