pytrilogy 0.3.148__cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (206)
  1. LICENSE.md +19 -0
  2. _preql_import_resolver/__init__.py +5 -0
  3. _preql_import_resolver/_preql_import_resolver.cpython-312-aarch64-linux-gnu.so +0 -0
  4. pytrilogy-0.3.148.dist-info/METADATA +555 -0
  5. pytrilogy-0.3.148.dist-info/RECORD +206 -0
  6. pytrilogy-0.3.148.dist-info/WHEEL +5 -0
  7. pytrilogy-0.3.148.dist-info/entry_points.txt +2 -0
  8. pytrilogy-0.3.148.dist-info/licenses/LICENSE.md +19 -0
  9. trilogy/__init__.py +27 -0
  10. trilogy/ai/README.md +10 -0
  11. trilogy/ai/__init__.py +19 -0
  12. trilogy/ai/constants.py +92 -0
  13. trilogy/ai/conversation.py +107 -0
  14. trilogy/ai/enums.py +7 -0
  15. trilogy/ai/execute.py +50 -0
  16. trilogy/ai/models.py +34 -0
  17. trilogy/ai/prompts.py +100 -0
  18. trilogy/ai/providers/__init__.py +0 -0
  19. trilogy/ai/providers/anthropic.py +106 -0
  20. trilogy/ai/providers/base.py +24 -0
  21. trilogy/ai/providers/google.py +146 -0
  22. trilogy/ai/providers/openai.py +89 -0
  23. trilogy/ai/providers/utils.py +68 -0
  24. trilogy/authoring/README.md +3 -0
  25. trilogy/authoring/__init__.py +148 -0
  26. trilogy/constants.py +119 -0
  27. trilogy/core/README.md +52 -0
  28. trilogy/core/__init__.py +0 -0
  29. trilogy/core/constants.py +6 -0
  30. trilogy/core/enums.py +454 -0
  31. trilogy/core/env_processor.py +239 -0
  32. trilogy/core/environment_helpers.py +320 -0
  33. trilogy/core/ergonomics.py +193 -0
  34. trilogy/core/exceptions.py +123 -0
  35. trilogy/core/functions.py +1240 -0
  36. trilogy/core/graph_models.py +142 -0
  37. trilogy/core/internal.py +85 -0
  38. trilogy/core/models/__init__.py +0 -0
  39. trilogy/core/models/author.py +2662 -0
  40. trilogy/core/models/build.py +2603 -0
  41. trilogy/core/models/build_environment.py +165 -0
  42. trilogy/core/models/core.py +506 -0
  43. trilogy/core/models/datasource.py +434 -0
  44. trilogy/core/models/environment.py +756 -0
  45. trilogy/core/models/execute.py +1213 -0
  46. trilogy/core/optimization.py +251 -0
  47. trilogy/core/optimizations/__init__.py +12 -0
  48. trilogy/core/optimizations/base_optimization.py +17 -0
  49. trilogy/core/optimizations/hide_unused_concept.py +47 -0
  50. trilogy/core/optimizations/inline_datasource.py +102 -0
  51. trilogy/core/optimizations/predicate_pushdown.py +245 -0
  52. trilogy/core/processing/README.md +94 -0
  53. trilogy/core/processing/READMEv2.md +121 -0
  54. trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
  55. trilogy/core/processing/__init__.py +0 -0
  56. trilogy/core/processing/concept_strategies_v3.py +508 -0
  57. trilogy/core/processing/constants.py +15 -0
  58. trilogy/core/processing/discovery_node_factory.py +451 -0
  59. trilogy/core/processing/discovery_utility.py +548 -0
  60. trilogy/core/processing/discovery_validation.py +167 -0
  61. trilogy/core/processing/graph_utils.py +43 -0
  62. trilogy/core/processing/node_generators/README.md +9 -0
  63. trilogy/core/processing/node_generators/__init__.py +31 -0
  64. trilogy/core/processing/node_generators/basic_node.py +160 -0
  65. trilogy/core/processing/node_generators/common.py +270 -0
  66. trilogy/core/processing/node_generators/constant_node.py +38 -0
  67. trilogy/core/processing/node_generators/filter_node.py +315 -0
  68. trilogy/core/processing/node_generators/group_node.py +213 -0
  69. trilogy/core/processing/node_generators/group_to_node.py +117 -0
  70. trilogy/core/processing/node_generators/multiselect_node.py +207 -0
  71. trilogy/core/processing/node_generators/node_merge_node.py +695 -0
  72. trilogy/core/processing/node_generators/recursive_node.py +88 -0
  73. trilogy/core/processing/node_generators/rowset_node.py +165 -0
  74. trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  75. trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
  76. trilogy/core/processing/node_generators/select_merge_node.py +786 -0
  77. trilogy/core/processing/node_generators/select_node.py +95 -0
  78. trilogy/core/processing/node_generators/synonym_node.py +98 -0
  79. trilogy/core/processing/node_generators/union_node.py +91 -0
  80. trilogy/core/processing/node_generators/unnest_node.py +182 -0
  81. trilogy/core/processing/node_generators/window_node.py +201 -0
  82. trilogy/core/processing/nodes/README.md +28 -0
  83. trilogy/core/processing/nodes/__init__.py +179 -0
  84. trilogy/core/processing/nodes/base_node.py +522 -0
  85. trilogy/core/processing/nodes/filter_node.py +75 -0
  86. trilogy/core/processing/nodes/group_node.py +194 -0
  87. trilogy/core/processing/nodes/merge_node.py +420 -0
  88. trilogy/core/processing/nodes/recursive_node.py +46 -0
  89. trilogy/core/processing/nodes/select_node_v2.py +242 -0
  90. trilogy/core/processing/nodes/union_node.py +53 -0
  91. trilogy/core/processing/nodes/unnest_node.py +62 -0
  92. trilogy/core/processing/nodes/window_node.py +56 -0
  93. trilogy/core/processing/utility.py +823 -0
  94. trilogy/core/query_processor.py +604 -0
  95. trilogy/core/statements/README.md +35 -0
  96. trilogy/core/statements/__init__.py +0 -0
  97. trilogy/core/statements/author.py +536 -0
  98. trilogy/core/statements/build.py +0 -0
  99. trilogy/core/statements/common.py +20 -0
  100. trilogy/core/statements/execute.py +155 -0
  101. trilogy/core/table_processor.py +66 -0
  102. trilogy/core/utility.py +8 -0
  103. trilogy/core/validation/README.md +46 -0
  104. trilogy/core/validation/__init__.py +0 -0
  105. trilogy/core/validation/common.py +161 -0
  106. trilogy/core/validation/concept.py +146 -0
  107. trilogy/core/validation/datasource.py +227 -0
  108. trilogy/core/validation/environment.py +73 -0
  109. trilogy/core/validation/fix.py +256 -0
  110. trilogy/dialect/__init__.py +32 -0
  111. trilogy/dialect/base.py +1431 -0
  112. trilogy/dialect/bigquery.py +314 -0
  113. trilogy/dialect/common.py +147 -0
  114. trilogy/dialect/config.py +159 -0
  115. trilogy/dialect/dataframe.py +50 -0
  116. trilogy/dialect/duckdb.py +376 -0
  117. trilogy/dialect/enums.py +149 -0
  118. trilogy/dialect/metadata.py +173 -0
  119. trilogy/dialect/mock.py +190 -0
  120. trilogy/dialect/postgres.py +117 -0
  121. trilogy/dialect/presto.py +110 -0
  122. trilogy/dialect/results.py +89 -0
  123. trilogy/dialect/snowflake.py +129 -0
  124. trilogy/dialect/sql_server.py +137 -0
  125. trilogy/engine.py +48 -0
  126. trilogy/execution/__init__.py +17 -0
  127. trilogy/execution/config.py +119 -0
  128. trilogy/execution/state/__init__.py +0 -0
  129. trilogy/execution/state/file_state_store.py +0 -0
  130. trilogy/execution/state/sqllite_state_store.py +0 -0
  131. trilogy/execution/state/state_store.py +301 -0
  132. trilogy/executor.py +656 -0
  133. trilogy/hooks/__init__.py +4 -0
  134. trilogy/hooks/base_hook.py +40 -0
  135. trilogy/hooks/graph_hook.py +135 -0
  136. trilogy/hooks/query_debugger.py +166 -0
  137. trilogy/metadata/__init__.py +0 -0
  138. trilogy/parser.py +10 -0
  139. trilogy/parsing/README.md +21 -0
  140. trilogy/parsing/__init__.py +0 -0
  141. trilogy/parsing/common.py +1069 -0
  142. trilogy/parsing/config.py +5 -0
  143. trilogy/parsing/exceptions.py +8 -0
  144. trilogy/parsing/helpers.py +1 -0
  145. trilogy/parsing/parse_engine.py +2863 -0
  146. trilogy/parsing/render.py +773 -0
  147. trilogy/parsing/trilogy.lark +544 -0
  148. trilogy/py.typed +0 -0
  149. trilogy/render.py +45 -0
  150. trilogy/scripts/README.md +9 -0
  151. trilogy/scripts/__init__.py +0 -0
  152. trilogy/scripts/agent.py +41 -0
  153. trilogy/scripts/agent_info.py +306 -0
  154. trilogy/scripts/common.py +430 -0
  155. trilogy/scripts/dependency/Cargo.lock +617 -0
  156. trilogy/scripts/dependency/Cargo.toml +39 -0
  157. trilogy/scripts/dependency/README.md +131 -0
  158. trilogy/scripts/dependency/build.sh +25 -0
  159. trilogy/scripts/dependency/src/directory_resolver.rs +387 -0
  160. trilogy/scripts/dependency/src/lib.rs +16 -0
  161. trilogy/scripts/dependency/src/main.rs +770 -0
  162. trilogy/scripts/dependency/src/parser.rs +435 -0
  163. trilogy/scripts/dependency/src/preql.pest +208 -0
  164. trilogy/scripts/dependency/src/python_bindings.rs +311 -0
  165. trilogy/scripts/dependency/src/resolver.rs +716 -0
  166. trilogy/scripts/dependency/tests/base.preql +3 -0
  167. trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
  168. trilogy/scripts/dependency/tests/customer.preql +6 -0
  169. trilogy/scripts/dependency/tests/main.preql +9 -0
  170. trilogy/scripts/dependency/tests/orders.preql +7 -0
  171. trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
  172. trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
  173. trilogy/scripts/dependency.py +323 -0
  174. trilogy/scripts/display.py +555 -0
  175. trilogy/scripts/environment.py +59 -0
  176. trilogy/scripts/fmt.py +32 -0
  177. trilogy/scripts/ingest.py +472 -0
  178. trilogy/scripts/ingest_helpers/__init__.py +1 -0
  179. trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
  180. trilogy/scripts/ingest_helpers/formatting.py +93 -0
  181. trilogy/scripts/ingest_helpers/typing.py +161 -0
  182. trilogy/scripts/init.py +105 -0
  183. trilogy/scripts/parallel_execution.py +748 -0
  184. trilogy/scripts/plan.py +189 -0
  185. trilogy/scripts/refresh.py +106 -0
  186. trilogy/scripts/run.py +79 -0
  187. trilogy/scripts/serve.py +202 -0
  188. trilogy/scripts/serve_helpers/__init__.py +41 -0
  189. trilogy/scripts/serve_helpers/file_discovery.py +142 -0
  190. trilogy/scripts/serve_helpers/index_generation.py +206 -0
  191. trilogy/scripts/serve_helpers/models.py +38 -0
  192. trilogy/scripts/single_execution.py +131 -0
  193. trilogy/scripts/testing.py +129 -0
  194. trilogy/scripts/trilogy.py +75 -0
  195. trilogy/std/__init__.py +0 -0
  196. trilogy/std/color.preql +3 -0
  197. trilogy/std/date.preql +13 -0
  198. trilogy/std/display.preql +18 -0
  199. trilogy/std/geography.preql +22 -0
  200. trilogy/std/metric.preql +15 -0
  201. trilogy/std/money.preql +67 -0
  202. trilogy/std/net.preql +14 -0
  203. trilogy/std/ranking.preql +7 -0
  204. trilogy/std/report.preql +5 -0
  205. trilogy/std/semantic.preql +6 -0
  206. trilogy/utility.py +34 -0
trilogy/core/processing/nodes/recursive_node.py
@@ -0,0 +1,46 @@
+ from typing import List
+
+ from trilogy.core.enums import SourceType
+ from trilogy.core.models.build import BuildConcept
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.models.execute import QueryDatasource
+ from trilogy.core.processing.nodes.base_node import StrategyNode
+
+
+ class RecursiveNode(StrategyNode):
+     """Recursive nodes represent a recursive expansion over a self-referencing concept"""
+
+     source_type = SourceType.RECURSIVE
+
+     def __init__(
+         self,
+         input_concepts: List[BuildConcept],
+         output_concepts: List[BuildConcept],
+         environment: BuildEnvironment,
+         whole_grain: bool = False,
+         parents: List["StrategyNode"] | None = None,
+         depth: int = 0,
+     ):
+         super().__init__(
+             input_concepts=input_concepts,
+             output_concepts=output_concepts,
+             environment=environment,
+             whole_grain=whole_grain,
+             parents=parents,
+             depth=depth,
+         )
+
+     def _resolve(self) -> QueryDatasource:
+         """We need to ensure that any filtered values are removed from the output to avoid inappropriate references"""
+         base = super()._resolve()
+         return base
+
+     def copy(self) -> "RecursiveNode":
+         return RecursiveNode(
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             whole_grain=self.whole_grain,
+             parents=self.parents,
+             depth=self.depth,
+         )
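For orientation, a minimal pure-Python sketch of the kind of self-referencing expansion a RecursiveNode stands in for, similar to what a recursive CTE computes. This is not trilogy's API; the manager/report rows and the ancestors helper are hypothetical.

# Conceptual illustration only, not trilogy API. Hypothetical parent/child data.
edges = {"eve": None, "alice": "eve", "bob": "alice", "carol": "alice"}

def ancestors(name: str) -> list[str]:
    """Walk the self-referencing 'manager' key until the root, as a recursive query would."""
    chain = []
    parent = edges.get(name)
    while parent is not None:
        chain.append(parent)
        parent = edges.get(parent)
    return chain

print(ancestors("bob"))  # ['alice', 'eve']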
trilogy/core/processing/nodes/select_node_v2.py
@@ -0,0 +1,242 @@
+ from typing import List, Optional
+
+ from trilogy.constants import logger
+ from trilogy.core.constants import CONSTANT_DATASET
+ from trilogy.core.enums import Derivation, Purpose, SourceType
+ from trilogy.core.models.build import (
+     BuildComparison,
+     BuildConcept,
+     BuildConditional,
+     BuildDatasource,
+     BuildFunction,
+     BuildGrain,
+     BuildOrderBy,
+     BuildParenthetical,
+ )
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.models.execute import QueryDatasource, UnnestJoin
+ from trilogy.core.processing.nodes.base_node import StrategyNode, resolve_concept_map
+ from trilogy.utility import unique
+
+ LOGGER_PREFIX = "[CONCEPT DETAIL - SELECT NODE]"
+
+
+ class SelectNode(StrategyNode):
+     """Select nodes actually fetch raw data from a table.
+     Responsible for selecting the cheapest option from which to select.
+     """
+
+     source_type = SourceType.SELECT
+
+     def __init__(
+         self,
+         input_concepts: List[BuildConcept],
+         output_concepts: List[BuildConcept],
+         environment: BuildEnvironment,
+         datasource: BuildDatasource | None = None,
+         whole_grain: bool = False,
+         parents: List["StrategyNode"] | None = None,
+         depth: int = 0,
+         partial_concepts: List[BuildConcept] | None = None,
+         nullable_concepts: List[BuildConcept] | None = None,
+         accept_partial: bool = False,
+         grain: Optional[BuildGrain] = None,
+         force_group: bool | None = False,
+         conditions: (
+             BuildConditional | BuildComparison | BuildParenthetical | None
+         ) = None,
+         preexisting_conditions: (
+             BuildConditional | BuildComparison | BuildParenthetical | None
+         ) = None,
+         hidden_concepts: set[str] | None = None,
+         ordering: BuildOrderBy | None = None,
+     ):
+         super().__init__(
+             input_concepts=input_concepts,
+             output_concepts=output_concepts,
+             environment=environment,
+             whole_grain=whole_grain,
+             parents=parents,
+             depth=depth,
+             partial_concepts=partial_concepts,
+             nullable_concepts=nullable_concepts,
+             force_group=force_group,
+             grain=grain,
+             conditions=conditions,
+             preexisting_conditions=preexisting_conditions,
+             hidden_concepts=hidden_concepts,
+             ordering=ordering,
+         )
+         self.accept_partial = accept_partial
+         self.datasource = datasource
+
+     def validate_inputs(self):
+         # we do not need to validate inputs for a select node
+         # as it will be a root
+         return
+
+     def resolve_from_provided_datasource(
+         self,
+     ) -> QueryDatasource:
+         if not self.datasource:
+             raise ValueError("Datasource not provided")
+         datasource: BuildDatasource = self.datasource
+
+         all_concepts_final: List[BuildConcept] = unique(self.all_concepts, "address")
+         source_map: dict[str, set[BuildDatasource | QueryDatasource | UnnestJoin]] = {
+             concept.address: {datasource} for concept in self.input_concepts
+         }
+
+         derived_concepts = [
+             c
+             for c in datasource.columns
+             if isinstance(c.alias, BuildFunction) and c.concept.address in source_map
+         ]
+         for c in derived_concepts:
+             if not isinstance(c.alias, BuildFunction):
+                 continue
+             for x in c.alias.concept_arguments:
+                 source_map[x.address] = {datasource}
+         for x in all_concepts_final:
+             if x.address not in source_map and x.derivation in (
+                 Derivation.MULTISELECT,
+                 Derivation.FILTER,
+                 Derivation.BASIC,
+                 Derivation.ROWSET,
+                 Derivation.UNION,
+             ):
+                 source_map[x.address] = set()
+
+         # if we're not grouping
+         # force grain to datasource grain
+         # so that we merge on the same grain
+         if self.force_group is False:
+             grain = datasource.grain
+         else:
+             grain = self.grain or BuildGrain()
+         return QueryDatasource(
+             input_concepts=self.input_concepts,
+             output_concepts=all_concepts_final,
+             source_map=source_map,
+             datasources=[datasource],
+             grain=grain,
+             joins=[],
+             partial_concepts=[
+                 c.concept for c in datasource.columns if not c.is_complete
+             ],
+             nullable_concepts=[c.concept for c in datasource.columns if c.is_nullable],
+             source_type=SourceType.DIRECT_SELECT,
+             # we can skip rendering conditions
+             condition=self.conditions,
+             # select nodes should never group
+             force_group=self.force_group,
+             hidden_concepts=self.hidden_concepts,
+             ordering=self.ordering,
+         )
+
+     def resolve_from_constant_datasources(self) -> QueryDatasource:
+         datasource = BuildDatasource(
+             name=CONSTANT_DATASET, address=CONSTANT_DATASET, columns=[]
+         )
+         return QueryDatasource(
+             input_concepts=[],
+             output_concepts=unique(self.all_concepts, "address"),
+             source_map={concept.address: set() for concept in self.all_concepts},
+             datasources=[datasource],
+             grain=datasource.grain,
+             condition=self.conditions,
+             joins=[],
+             partial_concepts=[],
+             source_type=SourceType.CONSTANT,
+             hidden_concepts=self.hidden_concepts,
+             ordering=self.ordering,
+         )
+
+     def _resolve(self) -> QueryDatasource:
+         # if we have parent nodes, we do not need to go to a datasource
+         resolution: QueryDatasource | None = None
+         if all(
+             [
+                 (
+                     c.derivation == Derivation.CONSTANT
+                     or (
+                         c.purpose == Purpose.CONSTANT
+                         and c.derivation == Derivation.MULTISELECT
+                     )
+                 )
+                 for c in self.all_concepts
+             ]
+         ):
+             logger.info(
+                 f"{self.logging_prefix}{LOGGER_PREFIX} have a constant datasource"
+             )
+             resolution = self.resolve_from_constant_datasources()
+             return resolution
+
+         if self.datasource and not resolution:
+             resolution = self.resolve_from_provided_datasource()
+
+         if self.parents:
+             if not resolution:
+                 return super()._resolve()
+             # zip in our parent source map
+             parent_sources: List[QueryDatasource | BuildDatasource] = [
+                 p.resolve() for p in self.parents
+             ]
+
+             resolution.datasources += parent_sources
+
+             source_map = resolve_concept_map(
+                 parent_sources,
+                 targets=self.output_concepts,
+                 inherited_inputs=self.input_concepts + self.existence_concepts,
+             )
+             for k, v in source_map.items():
+                 if v and k not in resolution.source_map:
+                     resolution.source_map[k] = v
+         if not resolution:
+             raise ValueError(f"No select node could be generated for {self}")
+         return resolution
+
+     def copy(self) -> "SelectNode":
+         return SelectNode(
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             datasource=self.datasource,
+             depth=self.depth,
+             parents=self.parents,
+             whole_grain=self.whole_grain,
+             partial_concepts=list(self.partial_concepts),
+             nullable_concepts=list(self.nullable_concepts),
+             accept_partial=self.accept_partial,
+             grain=self.grain,
+             force_group=self.force_group,
+             conditions=self.conditions,
+             preexisting_conditions=self.preexisting_conditions,
+             hidden_concepts=self.hidden_concepts,
+             ordering=self.ordering,
+         )
+
+
+ class ConstantNode(SelectNode):
+     """Represents a constant value."""
+
+     source_type = SourceType.CONSTANT
+
+     def copy(self) -> "ConstantNode":
+         return ConstantNode(
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             datasource=self.datasource,
+             depth=self.depth,
+             partial_concepts=list(self.partial_concepts),
+             conditions=self.conditions,
+             preexisting_conditions=self.preexisting_conditions,
+             hidden_concepts=self.hidden_concepts,
+             ordering=self.ordering,
+         )
+
+     def _resolve(self) -> QueryDatasource:
+         return self.resolve_from_constant_datasources()
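To make the source-map bookkeeping in resolve_from_provided_datasource easier to follow, here is a small stand-alone sketch using plain dicts and sets rather than trilogy's BuildConcept and BuildDatasource types. The table name, concept addresses, and column metadata are invented for illustration.

# Conceptual illustration only, not trilogy API. Addresses and columns are hypothetical.
datasource = "orders_table"
columns = {
    "order.id": {"complete": True, "nullable": False},
    "order.discount": {"complete": True, "nullable": True},
    "order.region": {"complete": False, "nullable": False},  # only covers some rows
}
requested = ["order.id", "order.discount", "order.region"]

# mirror of the source map: each requested concept address -> the set of sources supplying it
source_map = {address: {datasource} for address in requested if address in columns}

# columns that do not fully cover a concept are tracked as partial, nullable ones as nullable,
# so later merge steps can treat them appropriately
partial = [a for a, meta in columns.items() if not meta["complete"]]
nullable = [a for a, meta in columns.items() if meta["nullable"]]

print(source_map)  # every address maps to {'orders_table'}
print(partial)     # ['order.region']
print(nullable)    # ['order.discount']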
trilogy/core/processing/nodes/union_node.py
@@ -0,0 +1,53 @@
+ from typing import List
+
+ from trilogy.core.enums import SourceType
+ from trilogy.core.models.build import BuildConcept
+ from trilogy.core.models.execute import QueryDatasource
+ from trilogy.core.processing.nodes.base_node import StrategyNode
+
+
+ class UnionNode(StrategyNode):
+     """Union nodes represent combining two keyspaces"""
+
+     source_type = SourceType.UNION
+
+     def __init__(
+         self,
+         input_concepts: List[BuildConcept],
+         output_concepts: List[BuildConcept],
+         environment,
+         whole_grain: bool = False,
+         parents: List["StrategyNode"] | None = None,
+         depth: int = 0,
+         partial_concepts: List[BuildConcept] | None = None,
+     ):
+         super().__init__(
+             input_concepts=input_concepts,
+             output_concepts=output_concepts,
+             environment=environment,
+             whole_grain=whole_grain,
+             parents=parents,
+             depth=depth,
+             partial_concepts=partial_concepts,
+         )
+         if self.partial_concepts != []:
+             raise ValueError(
+                 f"UnionNode should not have partial concepts, has {self.partial_concepts}, was given {partial_concepts}"
+             )
+         self.partial_concepts = []
+
+     def _resolve(self) -> QueryDatasource:
+         """We need to ensure that any filtered values are removed from the output to avoid inappropriate references"""
+         base = super()._resolve()
+         return base
+
+     def copy(self) -> "UnionNode":
+         return UnionNode(
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             whole_grain=self.whole_grain,
+             parents=[x.copy() for x in self.parents] if self.parents else None,
+             depth=self.depth,
+             partial_concepts=self.partial_concepts,
+         )
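A brief illustration of the "combining two keyspaces" idea, using plain Python lists instead of trilogy objects; the rows and field names are hypothetical and this does not use trilogy's API.

# Conceptual illustration only, not trilogy API.
us_customers = [{"id": 1, "region": "us"}, {"id": 2, "region": "us"}]
eu_customers = [{"id": 7, "region": "eu"}]

# a union stacks two row sets that expose the same concepts into one keyspace,
# which is roughly what the parents of a UnionNode contribute to the rendered SQL union
combined = us_customers + eu_customers
print(len(combined))  # 3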
trilogy/core/processing/nodes/unnest_node.py
@@ -0,0 +1,62 @@
+ from typing import List
+
+ from trilogy.core.enums import SourceType
+ from trilogy.core.models.build import BuildConcept, BuildFunction
+ from trilogy.core.models.execute import QueryDatasource, UnnestJoin
+ from trilogy.core.processing.nodes.base_node import StrategyNode
+
+
+ class UnnestNode(StrategyNode):
+     """Unnest nodes represent an expansion of an array or other
+     column into rows.
+     """
+
+     source_type = SourceType.UNNEST
+
+     def __init__(
+         self,
+         unnest_concepts: List[BuildConcept],
+         input_concepts: List[BuildConcept],
+         output_concepts: List[BuildConcept],
+         environment,
+         whole_grain: bool = False,
+         parents: List["StrategyNode"] | None = None,
+         depth: int = 0,
+     ):
+         super().__init__(
+             input_concepts=input_concepts,
+             output_concepts=output_concepts,
+             environment=environment,
+             whole_grain=whole_grain,
+             parents=parents,
+             depth=depth,
+         )
+         self.unnest_concepts = unnest_concepts
+
+     def _resolve(self) -> QueryDatasource:
+         """We need to ensure that any filtered values are removed from the output to avoid inappropriate references"""
+         base = super()._resolve()
+         lineage = self.unnest_concepts[0].lineage
+         assert isinstance(lineage, BuildFunction)
+         final = "_".join(set([c.address for c in self.unnest_concepts]))
+         unnest = UnnestJoin(
+             concepts=self.unnest_concepts,
+             parent=lineage,
+             alias=f'unnest_{final.replace(".", "_")}',
+         )
+         base.joins.append(unnest)
+         for unnest_concept in self.unnest_concepts:
+             base.source_map[unnest_concept.address] = {unnest}
+             base.join_derived_concepts = [unnest_concept]
+         return base
+
+     def copy(self) -> "UnnestNode":
+         return UnnestNode(
+             unnest_concepts=self.unnest_concepts,
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             whole_grain=self.whole_grain,
+             parents=self.parents,
+             depth=self.depth,
+         )
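As a plain-Python analogue of the UnnestJoin added in _resolve above (illustrative only; the rows and field names are made up), unnesting expands an array-valued column into one output row per element.

# Conceptual illustration only, not trilogy API.
rows = [
    {"order_id": 1, "tags": ["rush", "gift"]},
    {"order_id": 2, "tags": ["rush"]},
]

# expand the array column 'tags' so each element gets its own row
unnested = [
    {"order_id": row["order_id"], "tag": tag}
    for row in rows
    for tag in row["tags"]
]
print(unnested)
# [{'order_id': 1, 'tag': 'rush'}, {'order_id': 1, 'tag': 'gift'}, {'order_id': 2, 'tag': 'rush'}]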
trilogy/core/processing/nodes/window_node.py
@@ -0,0 +1,56 @@
+ from typing import List
+
+ from trilogy.core.enums import SourceType
+ from trilogy.core.models.build import (
+     BuildComparison,
+     BuildConcept,
+     BuildConditional,
+     BuildOrderBy,
+     BuildParenthetical,
+ )
+ from trilogy.core.models.execute import QueryDatasource
+ from trilogy.core.processing.nodes.base_node import StrategyNode
+
+
+ class WindowNode(StrategyNode):
+     source_type = SourceType.WINDOW
+
+     def __init__(
+         self,
+         input_concepts: List[BuildConcept],
+         output_concepts: List[BuildConcept],
+         environment,
+         whole_grain: bool = False,
+         parents: List["StrategyNode"] | None = None,
+         depth: int = 0,
+         ordering: BuildOrderBy | None = None,
+         preexisting_conditions: (
+             BuildConditional | BuildComparison | BuildParenthetical | None
+         ) = None,
+     ):
+         super().__init__(
+             input_concepts=input_concepts,
+             output_concepts=output_concepts,
+             environment=environment,
+             whole_grain=whole_grain,
+             parents=parents,
+             depth=depth,
+             ordering=ordering,
+             preexisting_conditions=preexisting_conditions,
+         )
+
+     def _resolve(self) -> QueryDatasource:
+         base = super()._resolve()
+         return base
+
+     def copy(self) -> "WindowNode":
+         return WindowNode(
+             input_concepts=list(self.input_concepts),
+             output_concepts=list(self.output_concepts),
+             environment=self.environment,
+             whole_grain=self.whole_grain,
+             parents=self.parents,
+             depth=self.depth,
+             ordering=self.ordering,
+             preexisting_conditions=self.preexisting_conditions,
+         )
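Finally, a self-contained sketch of the class of calculation a WindowNode isolates into its own query stage: a value computed over a partition and ordering while keeping every input row, here a per-region sales rank. This is not trilogy's API; the rows and ranking logic are hypothetical.

# Conceptual illustration only, not trilogy API.
from itertools import groupby

rows = [
    {"region": "us", "store": "a", "sales": 50},
    {"region": "us", "store": "b", "sales": 90},
    {"region": "eu", "store": "c", "sales": 40},
]

ranked = []
# group rows by region (groupby needs the input sorted by the same key)
for _, group in groupby(sorted(rows, key=lambda r: r["region"]), key=lambda r: r["region"]):
    # within each region, rank stores by descending sales, keeping every row
    for rank, row in enumerate(sorted(group, key=lambda r: r["sales"], reverse=True), start=1):
        ranked.append({**row, "rank": rank})

print(ranked)  # store 'b' ranks 1 and 'a' ranks 2 within 'us'; 'c' ranks 1 within 'eu'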