pytrilogy-0.3.138-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (182)
  1. LICENSE.md +19 -0
  2. _preql_import_resolver/__init__.py +5 -0
  3. _preql_import_resolver/_preql_import_resolver.cpython-311-x86_64-linux-gnu.so +0 -0
  4. pytrilogy-0.3.138.dist-info/METADATA +525 -0
  5. pytrilogy-0.3.138.dist-info/RECORD +182 -0
  6. pytrilogy-0.3.138.dist-info/WHEEL +5 -0
  7. pytrilogy-0.3.138.dist-info/entry_points.txt +2 -0
  8. pytrilogy-0.3.138.dist-info/licenses/LICENSE.md +19 -0
  9. trilogy/__init__.py +9 -0
  10. trilogy/ai/README.md +10 -0
  11. trilogy/ai/__init__.py +19 -0
  12. trilogy/ai/constants.py +92 -0
  13. trilogy/ai/conversation.py +107 -0
  14. trilogy/ai/enums.py +7 -0
  15. trilogy/ai/execute.py +50 -0
  16. trilogy/ai/models.py +34 -0
  17. trilogy/ai/prompts.py +87 -0
  18. trilogy/ai/providers/__init__.py +0 -0
  19. trilogy/ai/providers/anthropic.py +106 -0
  20. trilogy/ai/providers/base.py +24 -0
  21. trilogy/ai/providers/google.py +146 -0
  22. trilogy/ai/providers/openai.py +89 -0
  23. trilogy/ai/providers/utils.py +68 -0
  24. trilogy/authoring/README.md +3 -0
  25. trilogy/authoring/__init__.py +143 -0
  26. trilogy/constants.py +113 -0
  27. trilogy/core/README.md +52 -0
  28. trilogy/core/__init__.py +0 -0
  29. trilogy/core/constants.py +6 -0
  30. trilogy/core/enums.py +443 -0
  31. trilogy/core/env_processor.py +120 -0
  32. trilogy/core/environment_helpers.py +320 -0
  33. trilogy/core/ergonomics.py +193 -0
  34. trilogy/core/exceptions.py +123 -0
  35. trilogy/core/functions.py +1227 -0
  36. trilogy/core/graph_models.py +139 -0
  37. trilogy/core/internal.py +85 -0
  38. trilogy/core/models/__init__.py +0 -0
  39. trilogy/core/models/author.py +2672 -0
  40. trilogy/core/models/build.py +2521 -0
  41. trilogy/core/models/build_environment.py +180 -0
  42. trilogy/core/models/core.py +494 -0
  43. trilogy/core/models/datasource.py +322 -0
  44. trilogy/core/models/environment.py +748 -0
  45. trilogy/core/models/execute.py +1177 -0
  46. trilogy/core/optimization.py +251 -0
  47. trilogy/core/optimizations/__init__.py +12 -0
  48. trilogy/core/optimizations/base_optimization.py +17 -0
  49. trilogy/core/optimizations/hide_unused_concept.py +47 -0
  50. trilogy/core/optimizations/inline_datasource.py +102 -0
  51. trilogy/core/optimizations/predicate_pushdown.py +245 -0
  52. trilogy/core/processing/README.md +94 -0
  53. trilogy/core/processing/READMEv2.md +121 -0
  54. trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
  55. trilogy/core/processing/__init__.py +0 -0
  56. trilogy/core/processing/concept_strategies_v3.py +508 -0
  57. trilogy/core/processing/constants.py +15 -0
  58. trilogy/core/processing/discovery_node_factory.py +451 -0
  59. trilogy/core/processing/discovery_utility.py +517 -0
  60. trilogy/core/processing/discovery_validation.py +167 -0
  61. trilogy/core/processing/graph_utils.py +43 -0
  62. trilogy/core/processing/node_generators/README.md +9 -0
  63. trilogy/core/processing/node_generators/__init__.py +31 -0
  64. trilogy/core/processing/node_generators/basic_node.py +160 -0
  65. trilogy/core/processing/node_generators/common.py +268 -0
  66. trilogy/core/processing/node_generators/constant_node.py +38 -0
  67. trilogy/core/processing/node_generators/filter_node.py +315 -0
  68. trilogy/core/processing/node_generators/group_node.py +213 -0
  69. trilogy/core/processing/node_generators/group_to_node.py +117 -0
  70. trilogy/core/processing/node_generators/multiselect_node.py +205 -0
  71. trilogy/core/processing/node_generators/node_merge_node.py +653 -0
  72. trilogy/core/processing/node_generators/recursive_node.py +88 -0
  73. trilogy/core/processing/node_generators/rowset_node.py +165 -0
  74. trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  75. trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
  76. trilogy/core/processing/node_generators/select_merge_node.py +748 -0
  77. trilogy/core/processing/node_generators/select_node.py +95 -0
  78. trilogy/core/processing/node_generators/synonym_node.py +98 -0
  79. trilogy/core/processing/node_generators/union_node.py +91 -0
  80. trilogy/core/processing/node_generators/unnest_node.py +182 -0
  81. trilogy/core/processing/node_generators/window_node.py +201 -0
  82. trilogy/core/processing/nodes/README.md +28 -0
  83. trilogy/core/processing/nodes/__init__.py +179 -0
  84. trilogy/core/processing/nodes/base_node.py +519 -0
  85. trilogy/core/processing/nodes/filter_node.py +75 -0
  86. trilogy/core/processing/nodes/group_node.py +194 -0
  87. trilogy/core/processing/nodes/merge_node.py +420 -0
  88. trilogy/core/processing/nodes/recursive_node.py +46 -0
  89. trilogy/core/processing/nodes/select_node_v2.py +242 -0
  90. trilogy/core/processing/nodes/union_node.py +53 -0
  91. trilogy/core/processing/nodes/unnest_node.py +62 -0
  92. trilogy/core/processing/nodes/window_node.py +56 -0
  93. trilogy/core/processing/utility.py +823 -0
  94. trilogy/core/query_processor.py +596 -0
  95. trilogy/core/statements/README.md +35 -0
  96. trilogy/core/statements/__init__.py +0 -0
  97. trilogy/core/statements/author.py +536 -0
  98. trilogy/core/statements/build.py +0 -0
  99. trilogy/core/statements/common.py +20 -0
  100. trilogy/core/statements/execute.py +155 -0
  101. trilogy/core/table_processor.py +66 -0
  102. trilogy/core/utility.py +8 -0
  103. trilogy/core/validation/README.md +46 -0
  104. trilogy/core/validation/__init__.py +0 -0
  105. trilogy/core/validation/common.py +161 -0
  106. trilogy/core/validation/concept.py +146 -0
  107. trilogy/core/validation/datasource.py +227 -0
  108. trilogy/core/validation/environment.py +73 -0
  109. trilogy/core/validation/fix.py +106 -0
  110. trilogy/dialect/__init__.py +32 -0
  111. trilogy/dialect/base.py +1359 -0
  112. trilogy/dialect/bigquery.py +256 -0
  113. trilogy/dialect/common.py +147 -0
  114. trilogy/dialect/config.py +144 -0
  115. trilogy/dialect/dataframe.py +50 -0
  116. trilogy/dialect/duckdb.py +177 -0
  117. trilogy/dialect/enums.py +147 -0
  118. trilogy/dialect/metadata.py +173 -0
  119. trilogy/dialect/mock.py +190 -0
  120. trilogy/dialect/postgres.py +91 -0
  121. trilogy/dialect/presto.py +104 -0
  122. trilogy/dialect/results.py +89 -0
  123. trilogy/dialect/snowflake.py +90 -0
  124. trilogy/dialect/sql_server.py +92 -0
  125. trilogy/engine.py +48 -0
  126. trilogy/execution/config.py +75 -0
  127. trilogy/executor.py +568 -0
  128. trilogy/hooks/__init__.py +4 -0
  129. trilogy/hooks/base_hook.py +40 -0
  130. trilogy/hooks/graph_hook.py +139 -0
  131. trilogy/hooks/query_debugger.py +166 -0
  132. trilogy/metadata/__init__.py +0 -0
  133. trilogy/parser.py +10 -0
  134. trilogy/parsing/README.md +21 -0
  135. trilogy/parsing/__init__.py +0 -0
  136. trilogy/parsing/common.py +1069 -0
  137. trilogy/parsing/config.py +5 -0
  138. trilogy/parsing/exceptions.py +8 -0
  139. trilogy/parsing/helpers.py +1 -0
  140. trilogy/parsing/parse_engine.py +2813 -0
  141. trilogy/parsing/render.py +750 -0
  142. trilogy/parsing/trilogy.lark +540 -0
  143. trilogy/py.typed +0 -0
  144. trilogy/render.py +42 -0
  145. trilogy/scripts/README.md +7 -0
  146. trilogy/scripts/__init__.py +0 -0
  147. trilogy/scripts/dependency/Cargo.lock +617 -0
  148. trilogy/scripts/dependency/Cargo.toml +39 -0
  149. trilogy/scripts/dependency/README.md +131 -0
  150. trilogy/scripts/dependency/build.sh +25 -0
  151. trilogy/scripts/dependency/src/directory_resolver.rs +162 -0
  152. trilogy/scripts/dependency/src/lib.rs +16 -0
  153. trilogy/scripts/dependency/src/main.rs +770 -0
  154. trilogy/scripts/dependency/src/parser.rs +435 -0
  155. trilogy/scripts/dependency/src/preql.pest +208 -0
  156. trilogy/scripts/dependency/src/python_bindings.rs +289 -0
  157. trilogy/scripts/dependency/src/resolver.rs +716 -0
  158. trilogy/scripts/dependency/tests/base.preql +3 -0
  159. trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
  160. trilogy/scripts/dependency/tests/customer.preql +6 -0
  161. trilogy/scripts/dependency/tests/main.preql +9 -0
  162. trilogy/scripts/dependency/tests/orders.preql +7 -0
  163. trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
  164. trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
  165. trilogy/scripts/dependency.py +323 -0
  166. trilogy/scripts/display.py +460 -0
  167. trilogy/scripts/environment.py +46 -0
  168. trilogy/scripts/parallel_execution.py +483 -0
  169. trilogy/scripts/single_execution.py +131 -0
  170. trilogy/scripts/trilogy.py +772 -0
  171. trilogy/std/__init__.py +0 -0
  172. trilogy/std/color.preql +3 -0
  173. trilogy/std/date.preql +13 -0
  174. trilogy/std/display.preql +18 -0
  175. trilogy/std/geography.preql +22 -0
  176. trilogy/std/metric.preql +15 -0
  177. trilogy/std/money.preql +67 -0
  178. trilogy/std/net.preql +14 -0
  179. trilogy/std/ranking.preql +7 -0
  180. trilogy/std/report.preql +5 -0
  181. trilogy/std/semantic.preql +6 -0
  182. trilogy/utility.py +34 -0
trilogy/core/processing/graph_utils.py
@@ -0,0 +1,43 @@
+ from collections import defaultdict
+ from typing import Dict, List
+
+ from trilogy.core.models.author import Concept
+ from trilogy.utility import unique
+
+
+ def extract_required_subgraphs(
+     assocs: defaultdict[str, list], path: List[str]
+ ) -> defaultdict[str, list]:
+     ds = path[0]
+     current: list[str] = []
+     for idx, val in enumerate(path):
+         if val.startswith("ds~"):
+             if current:
+                 assocs[ds] += current
+             current = [path[idx - 1]] if idx > 0 else []
+             ds = val
+         else:
+             current.append(val)
+     else:
+         if current:
+             assocs[ds] += current
+
+     return assocs
+
+
+ def extract_mandatory_subgraphs(paths: Dict[str, List[str]], g) -> List[List[Concept]]:
+     final: list[list[str]] = []
+     assocs: defaultdict[str, list] = defaultdict(list)
+     for path in paths.values():
+         extract_required_subgraphs(assocs, path)
+
+     for _, v in assocs.items():
+         final.append(v)
+     final_concepts = []
+     for value in final:
+         final_concepts.append(
+             unique(
+                 [g.nodes[v]["concept"] for v in value if v.startswith("c~")], "address"
+             )
+         )
+     return final_concepts
trilogy/core/processing/node_generators/README.md
@@ -0,0 +1,9 @@
+
+
+
+ For any specialized node, unpack the specialized concept X and fetch the rest.
+
+ For an unspecialized node, attempt to fetch concept X and the rest. If that cannot be done, check
+ whether all combinations of the other concepts plus X can be found, and return a merge node with all of those.
+
+ If not all combinations can be found, return what can be found.
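A minimal sketch of the fetch-or-merge strategy this README describes; `fetch` and `merge` below are hypothetical stand-ins for the package's concept-sourcing and merge-node machinery, not its actual APIs:

from typing import Callable, List, Optional


def resolve_unspecialized(
    x: str,
    others: List[str],
    fetch: Callable[[List[str]], Optional[object]],
    merge: Callable[[List[object]], object],
) -> Optional[object]:
    # First attempt: source concept X together with every other requested concept.
    node = fetch([x] + others)
    if node is not None:
        return node
    # Fallback: source X alongside each remaining concept separately, then merge
    # whichever partial nodes could actually be resolved (possibly not all of them).
    partial = [n for other in others if (n := fetch([x, other])) is not None]
    return merge(partial) if partial else None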
trilogy/core/processing/node_generators/__init__.py
@@ -0,0 +1,31 @@
+ from .basic_node import gen_basic_node
+ from .constant_node import gen_constant_node
+ from .filter_node import gen_filter_node
+ from .group_node import gen_group_node
+ from .group_to_node import gen_group_to_node
+ from .multiselect_node import gen_multiselect_node
+ from .node_merge_node import gen_merge_node
+ from .recursive_node import gen_recursive_node
+ from .rowset_node import gen_rowset_node
+ from .select_node import gen_select_node
+ from .synonym_node import gen_synonym_node
+ from .union_node import gen_union_node
+ from .unnest_node import gen_unnest_node
+ from .window_node import gen_window_node
+
+ __all__ = [
+     "gen_filter_node",
+     "gen_window_node",
+     "gen_group_node",
+     "gen_select_node",
+     "gen_basic_node",
+     "gen_unnest_node",
+     "gen_union_node",
+     "gen_merge_node",
+     "gen_group_to_node",
+     "gen_rowset_node",
+     "gen_multiselect_node",
+     "gen_synonym_node",
+     "gen_recursive_node",
+     "gen_constant_node",
+ ]
trilogy/core/processing/node_generators/basic_node.py
@@ -0,0 +1,160 @@
+ from typing import List
+
+ from trilogy.constants import logger
+ from trilogy.core.enums import FunctionClass, FunctionType, SourceType
+ from trilogy.core.models.build import BuildConcept, BuildFunction, BuildWhereClause
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.processing.node_generators.common import (
+     resolve_function_parent_concepts,
+ )
+ from trilogy.core.processing.nodes import ConstantNode, History, StrategyNode
+ from trilogy.utility import unique
+
+ LOGGER_PREFIX = "[GEN_BASIC_NODE]"
+
+
+ def is_equivalent_basic_function_lineage(
+     x: BuildConcept,
+     y: BuildConcept,
+ ):
+     if not isinstance(x.lineage, BuildFunction) or not isinstance(
+         y.lineage, BuildFunction
+     ):
+         return False
+     if x.lineage.operator == y.lineage.operator == FunctionType.ATTR_ACCESS:
+         return x.lineage.concept_arguments == y.lineage.concept_arguments
+     if x.lineage.operator == y.lineage.operator:
+         return True
+     if (
+         y.lineage.operator in FunctionClass.AGGREGATE_FUNCTIONS.value
+         or y.lineage.operator in FunctionClass.ONE_TO_MANY.value
+     ):
+         return False
+     return True
+
+
+ def gen_basic_node(
+     concept: BuildConcept,
+     local_optional: List[BuildConcept],
+     environment: BuildEnvironment,
+     g,
+     depth: int,
+     source_concepts,
+     history: History | None = None,
+     conditions: BuildWhereClause | None = None,
+ ):
+     depth_prefix = "\t" * depth
+     parent_concepts = resolve_function_parent_concepts(concept, environment=environment)
+
+     logger.info(
+         f"{depth_prefix}{LOGGER_PREFIX} basic node for {concept} with lineage {concept.lineage} has parents {[x for x in parent_concepts]}"
+     )
+     synonyms: list[BuildConcept] = []
+     ignored_optional: set[str] = set()
+
+     # when we are getting an attribute, if there is anything else
+     # that is an attribute of the same struct in local optional
+     # select that value for discovery as well
+     if (
+         isinstance(concept.lineage, BuildFunction)
+         and concept.lineage.operator == FunctionType.ATTR_ACCESS
+     ):
+         logger.info(
+             f"{depth_prefix}{LOGGER_PREFIX} checking for synonyms for attribute access"
+         )
+         for x in local_optional:
+             found = False
+             for z in x.pseudonyms:
+                 # gate to ensure we don't match to multiple synonyms
+                 if found:
+                     continue
+                 if z in environment.concepts:
+                     s_concept = environment.concepts[z]
+                 else:
+                     s_concept = environment.alias_origin_lookup[z]
+                 if is_equivalent_basic_function_lineage(concept, s_concept):
+                     found = True
+                     synonyms.append(s_concept)
+                     ignored_optional.add(x.address)
+     equivalent_optional = [
+         x
+         for x in local_optional
+         if is_equivalent_basic_function_lineage(concept, x)
+         and x.address != concept.address
+     ] + synonyms
+
+     if equivalent_optional:
+         logger.info(
+             f"{depth_prefix}{LOGGER_PREFIX} basic node for {concept} has equivalent optional {[x.address for x in equivalent_optional]}"
+         )
+     for eo in equivalent_optional:
+         new_parents = resolve_function_parent_concepts(eo, environment=environment)
+         logger.info(
+             f"{depth_prefix}{LOGGER_PREFIX} equivalent optional {eo.address} has parents {[x.address for x in new_parents]}"
+         )
+         parent_concepts += new_parents
+     non_equivalent_optional = [
+         x
+         for x in local_optional
+         if x not in equivalent_optional
+         and not any(x.address in y.pseudonyms for y in equivalent_optional)
+         and x.address not in ignored_optional
+     ]
+     logger.info(
+         f"{depth_prefix}{LOGGER_PREFIX} basic node for {concept} has non-equivalent optional {[x.address for x in non_equivalent_optional]}"
+     )
+     all_parents: list[BuildConcept] = unique(
+         parent_concepts + non_equivalent_optional, "address"
+     )
+     logger.info(
+         f"{depth_prefix}{LOGGER_PREFIX} Fetching parents {[x.address for x in all_parents]} with conditions {conditions}"
+     )
+     if all_parents:
+         parent_node: StrategyNode | None = source_concepts(
+             mandatory_list=all_parents,
+             environment=environment,
+             g=g,
+             depth=depth + 1,
+             history=history,
+             conditions=conditions,
+         )
+
+         if not parent_node:
+             logger.info(
+                 f"{depth_prefix}{LOGGER_PREFIX} No basic node could be generated for {concept}"
+             )
+             return None
+     else:
+         return ConstantNode(
+             input_concepts=[],
+             output_concepts=[concept],
+             environment=environment,
+             depth=depth,
+         )
+     if parent_node.source_type != SourceType.CONSTANT:
+         parent_node.source_type = SourceType.BASIC
+     parent_node.add_output_concept(concept)
+     for x in equivalent_optional:
+         parent_node.add_output_concept(x)
+
+     logger.info(
+         f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: output {[x.address for x in parent_node.output_concepts]}"
+     )
+     # if it's a constant, don't prune outputs
+     if parent_node.source_type == SourceType.CONSTANT:
+         return parent_node
+     targets = [concept] + local_optional + equivalent_optional
+     targets = [
+         s
+         for s in parent_node.output_concepts
+         if any(s.address in y.pseudonyms for y in targets)
+     ] + targets
+     hidden = [x for x in parent_node.output_concepts if x.address not in targets]
+     parent_node.hide_output_concepts(hidden)
+     parent_node.source_type = SourceType.BASIC
+
+     logger.info(
+         f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: input: {[x.address for x in parent_node.input_concepts]} output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
+     )
+
+     return parent_node
trilogy/core/processing/node_generators/common.py
@@ -0,0 +1,268 @@
+ from collections import defaultdict
+ from typing import Callable, List, Tuple
+
+ from trilogy.core.enums import Derivation, Purpose
+ from trilogy.core.models.build import (
+     BuildAggregateWrapper,
+     BuildComparison,
+     BuildConcept,
+     BuildFilterItem,
+     BuildFunction,
+     BuildWhereClause,
+     LooseBuildConceptList,
+ )
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.processing.nodes import (
+     History,
+     NodeJoin,
+ )
+ from trilogy.core.processing.nodes.base_node import StrategyNode
+ from trilogy.core.processing.nodes.merge_node import MergeNode
+ from trilogy.utility import unique
+
+ AGGREGATE_TYPES = (BuildAggregateWrapper,)
+ FUNCTION_TYPES = (BuildFunction,)
+
+
+ def resolve_function_parent_concepts(
+     concept: BuildConcept, environment: BuildEnvironment
+ ) -> List[BuildConcept]:
+     if not isinstance(
+         concept.lineage, (*FUNCTION_TYPES, *AGGREGATE_TYPES, BuildComparison)
+     ):
+         raise ValueError(
+             f"Concept {concept} lineage is not function or aggregate, is {type(concept.lineage)}"
+         )
+     if concept.derivation == Derivation.AGGREGATE:
+         base: list[BuildConcept] = []
+         if not concept.grain.abstract:
+             base = concept.lineage.concept_arguments + [
+                 environment.concepts[c] for c in concept.grain.components
+             ]
+         # if the base concept being aggregated is a property with a key
+         # keep the key as a parent
+         else:
+             base = concept.lineage.concept_arguments
+         if isinstance(concept.lineage, AGGREGATE_TYPES):
+             # for aggregate wrapper, don't include the by
+             extra_property_grain = concept.lineage.function.concept_arguments
+         else:
+             extra_property_grain = concept.lineage.concept_arguments
+         for x in extra_property_grain:
+             if isinstance(x, BuildConcept) and x.purpose == Purpose.PROPERTY and x.keys:
+                 base += [environment.concepts[c] for c in x.keys]
+         return unique(base, "address")
+     # TODO: handle basic lineage chains?
+     return unique(concept.lineage.concept_arguments, "address")
+
+
+ def resolve_condition_parent_concepts(
+     condition: BuildWhereClause,
+ ) -> Tuple[List[BuildConcept], List[Tuple[BuildConcept, ...]]]:
+     base_existence = []
+     base_rows: list[BuildConcept] = []
+     base_rows += condition.row_arguments
+     for ctuple in condition.existence_arguments:
+         base_existence.append(ctuple)
+     return unique(base_rows, "address"), base_existence
+
+
+ def resolve_filter_parent_concepts(
+     concept: BuildConcept,
+     environment: BuildEnvironment,
+ ) -> Tuple[List[BuildConcept], List[Tuple[BuildConcept, ...]]]:
+     if not isinstance(concept.lineage, (BuildFilterItem,)):
+         raise ValueError(
+             f"Concept {concept} lineage is not filter item, is {type(concept.lineage)}"
+         )
+     direct_parent = concept.lineage.content
+     base_existence = []
+     base_rows = [direct_parent] if isinstance(direct_parent, BuildConcept) else []
+     condition_rows, condition_existence = resolve_condition_parent_concepts(
+         concept.lineage.where
+     )
+     base_rows += condition_rows
+     base_existence += condition_existence
+     # this is required so that
+     if (
+         isinstance(direct_parent, BuildConcept)
+         and direct_parent.purpose in (Purpose.PROPERTY, Purpose.METRIC)
+         and direct_parent.keys
+     ):
+         base_rows += [environment.concepts[c] for c in direct_parent.keys]
+
+     if concept.lineage.where.existence_arguments:
+         return (
+             unique(base_rows, "address"),
+             base_existence,
+         )
+     return unique(base_rows, "address"), []
+
+
+ def gen_property_enrichment_node(
+     base_node: StrategyNode,
+     extra_properties: list[BuildConcept],
+     history: History,
+     environment: BuildEnvironment,
+     g,
+     depth: int,
+     source_concepts,
+     log_lambda: Callable,
+     conditions: BuildWhereClause | None = None,
+ ):
+     required_keys: dict[str, set[str]] = defaultdict(set)
+     for x in extra_properties:
+         if not x.keys:
+             raise SyntaxError(f"Property {x.address} missing keys in lookup")
+         keys = "-".join([y for y in x.keys])
+         required_keys[keys].add(x.address)
+     final_nodes = []
+     for _k, vs in required_keys.items():
+         log_lambda(f"Generating enrichment node for {_k} with {vs}")
+         ks = _k.split("-")
+         enrich_node: StrategyNode = source_concepts(
+             mandatory_list=[environment.concepts[k] for k in ks]
+             + [environment.concepts[v] for v in vs],
+             environment=environment,
+             g=g,
+             depth=depth + 1,
+             history=history,
+             conditions=conditions,
+         )
+         final_nodes.append(enrich_node)
+     return MergeNode(
+         input_concepts=unique(
+             base_node.output_concepts
+             + extra_properties
+             + [
+                 environment.concepts[v]
+                 for k, values in required_keys.items()
+                 for v in values
+             ],
+             "address",
+         ),
+         output_concepts=base_node.output_concepts + extra_properties,
+         environment=environment,
+         parents=[
+             base_node,
+         ]
+         + final_nodes,
+         preexisting_conditions=conditions.conditional if conditions else None,
+     )
+
+
+ def gen_enrichment_node(
+     base_node: StrategyNode,
+     join_keys: List[BuildConcept],
+     local_optional: list[BuildConcept],
+     environment: BuildEnvironment,
+     g,
+     depth: int,
+     source_concepts,
+     log_lambda,
+     history: History,
+     conditions: BuildWhereClause | None = None,
+ ):
+     local_opts = LooseBuildConceptList(concepts=local_optional)
+
+     extra_required = [
+         x
+         for x in local_opts
+         if x not in base_node.output_lcl or x in base_node.partial_lcl
+     ]
+
+     # property lookup optimization
+     # this helps create ergonomic merge nodes when evaluating a normalized star schema
+     # as we only want to lookup the missing properties based on the relevant keys
+     if all([x.purpose == Purpose.PROPERTY for x in extra_required]):
+         if all(
+             x.keys and all([key in base_node.output_lcl for key in x.keys])
+             for x in extra_required
+         ):
+             log_lambda(
+                 f"{str(type(base_node).__name__)} returning property optimized enrichment node for {extra_required[0].keys}"
+             )
+             return gen_property_enrichment_node(
+                 base_node,
+                 extra_required,
+                 environment=environment,
+                 g=g,
+                 depth=depth,
+                 source_concepts=source_concepts,
+                 history=history,
+                 conditions=conditions,
+                 log_lambda=log_lambda,
+             )
+     log_lambda(
+         f"{str(type(base_node).__name__)} searching for join keys {LooseBuildConceptList(concepts=join_keys)} and extra required {local_opts}"
+     )
+     enrich_node: StrategyNode = source_concepts(  # this fetches the parent + join keys
+         # to then connect to the rest of the query
+         mandatory_list=join_keys + extra_required,
+         environment=environment,
+         g=g,
+         depth=depth + 1,
+         history=history,
+         conditions=conditions,
+     )
+     if not enrich_node:
+         log_lambda(
+             f"{str(type(base_node).__name__)} enrichment node unresolvable, returning just group node"
+         )
+         return base_node
+     log_lambda(
+         f"{str(type(base_node).__name__)} returning merge node with group node + enrichment node"
+     )
+     non_hidden = [
+         x
+         for x in base_node.output_concepts
+         if x.address not in base_node.hidden_concepts
+     ]
+     return MergeNode(
+         input_concepts=unique(join_keys + extra_required + non_hidden, "address"),
+         output_concepts=unique(join_keys + extra_required + non_hidden, "address"),
+         environment=environment,
+         parents=[enrich_node, base_node],
+         force_group=False,
+         preexisting_conditions=conditions.conditional if conditions else None,
+         depth=depth,
+     )
+
+
+ def resolve_join_order(joins: List[NodeJoin]) -> List[NodeJoin]:
+     if not joins:
+         return []
+     available_aliases: set[StrategyNode] = set()
+     final_joins_pre = [*joins]
+     final_joins = []
+     left = set()
+     right = set()
+     for join in joins:
+         left.add(join.left_node)
+         right.add(join.right_node)
+
+     potential_basis = left.difference(right)
+     base_candidates = [x for x in final_joins_pre if x.left_node in potential_basis]
+     if not base_candidates:
+         raise SyntaxError(
+             f"Unresolvable join dependencies, left requires {left} and right requires {right}"
+         )
+     base = base_candidates[0]
+     final_joins.append(base)
+     available_aliases.add(base.left_node)
+     available_aliases.add(base.right_node)
+     while final_joins_pre:
+         new_final_joins_pre: List[NodeJoin] = []
+         for join in final_joins_pre:
+             if join.left_node in available_aliases:
+                 # we don't need to join twice
+                 # so whatever join we found first, works
+                 if join.right_node in available_aliases:
+                     continue
+                 final_joins.append(join)
+                 available_aliases.add(join.left_node)
+                 available_aliases.add(join.right_node)
+             else:
+                 new_final_joins_pre.append(join)
+         final_joins_pre = new_final_joins_pre
+     return final_joins
trilogy/core/processing/node_generators/constant_node.py
@@ -0,0 +1,38 @@
+ from typing import List
+
+ from trilogy.core.models.build import BuildConcept, BuildWhereClause
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.processing.nodes import History, StrategyNode
+
+ LOGGER_PREFIX = "[GEN_CONSTANT_NODE]"
+
+
+ def gen_constant_node(
+     concept: BuildConcept,
+     local_optional: List[BuildConcept],
+     environment: BuildEnvironment,
+     g,
+     depth: int,
+     source_concepts,
+     history: History | None = None,
+     conditions: BuildWhereClause | None = None,
+     accept_partial: bool = False,
+ ):
+     """our only goal here is to generate a row if conditions exist, or none if they do not"""
+
+     targets = [concept] + local_optional
+     if conditions:
+         targets += conditions.row_arguments
+     parent_node: StrategyNode | None = source_concepts(
+         mandatory_list=targets,
+         environment=environment,
+         g=g,
+         depth=depth + 1,
+         history=history,
+         conditions=conditions,
+         accept_partial=accept_partial,
+     )
+     if not parent_node:
+         return None
+     parent_node.set_output_concepts([concept] + local_optional)
+     return parent_node