pytrilogy 0.3.142__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (200)
  1. LICENSE.md +19 -0
  2. _preql_import_resolver/__init__.py +5 -0
  3. _preql_import_resolver/_preql_import_resolver.cp312-win_amd64.pyd +0 -0
  4. pytrilogy-0.3.142.dist-info/METADATA +555 -0
  5. pytrilogy-0.3.142.dist-info/RECORD +200 -0
  6. pytrilogy-0.3.142.dist-info/WHEEL +4 -0
  7. pytrilogy-0.3.142.dist-info/entry_points.txt +2 -0
  8. pytrilogy-0.3.142.dist-info/licenses/LICENSE.md +19 -0
  9. trilogy/__init__.py +16 -0
  10. trilogy/ai/README.md +10 -0
  11. trilogy/ai/__init__.py +19 -0
  12. trilogy/ai/constants.py +92 -0
  13. trilogy/ai/conversation.py +107 -0
  14. trilogy/ai/enums.py +7 -0
  15. trilogy/ai/execute.py +50 -0
  16. trilogy/ai/models.py +34 -0
  17. trilogy/ai/prompts.py +100 -0
  18. trilogy/ai/providers/__init__.py +0 -0
  19. trilogy/ai/providers/anthropic.py +106 -0
  20. trilogy/ai/providers/base.py +24 -0
  21. trilogy/ai/providers/google.py +146 -0
  22. trilogy/ai/providers/openai.py +89 -0
  23. trilogy/ai/providers/utils.py +68 -0
  24. trilogy/authoring/README.md +3 -0
  25. trilogy/authoring/__init__.py +148 -0
  26. trilogy/constants.py +113 -0
  27. trilogy/core/README.md +52 -0
  28. trilogy/core/__init__.py +0 -0
  29. trilogy/core/constants.py +6 -0
  30. trilogy/core/enums.py +443 -0
  31. trilogy/core/env_processor.py +120 -0
  32. trilogy/core/environment_helpers.py +320 -0
  33. trilogy/core/ergonomics.py +193 -0
  34. trilogy/core/exceptions.py +123 -0
  35. trilogy/core/functions.py +1227 -0
  36. trilogy/core/graph_models.py +139 -0
  37. trilogy/core/internal.py +85 -0
  38. trilogy/core/models/__init__.py +0 -0
  39. trilogy/core/models/author.py +2669 -0
  40. trilogy/core/models/build.py +2521 -0
  41. trilogy/core/models/build_environment.py +180 -0
  42. trilogy/core/models/core.py +501 -0
  43. trilogy/core/models/datasource.py +322 -0
  44. trilogy/core/models/environment.py +751 -0
  45. trilogy/core/models/execute.py +1177 -0
  46. trilogy/core/optimization.py +251 -0
  47. trilogy/core/optimizations/__init__.py +12 -0
  48. trilogy/core/optimizations/base_optimization.py +17 -0
  49. trilogy/core/optimizations/hide_unused_concept.py +47 -0
  50. trilogy/core/optimizations/inline_datasource.py +102 -0
  51. trilogy/core/optimizations/predicate_pushdown.py +245 -0
  52. trilogy/core/processing/README.md +94 -0
  53. trilogy/core/processing/READMEv2.md +121 -0
  54. trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
  55. trilogy/core/processing/__init__.py +0 -0
  56. trilogy/core/processing/concept_strategies_v3.py +508 -0
  57. trilogy/core/processing/constants.py +15 -0
  58. trilogy/core/processing/discovery_node_factory.py +451 -0
  59. trilogy/core/processing/discovery_utility.py +548 -0
  60. trilogy/core/processing/discovery_validation.py +167 -0
  61. trilogy/core/processing/graph_utils.py +43 -0
  62. trilogy/core/processing/node_generators/README.md +9 -0
  63. trilogy/core/processing/node_generators/__init__.py +31 -0
  64. trilogy/core/processing/node_generators/basic_node.py +160 -0
  65. trilogy/core/processing/node_generators/common.py +268 -0
  66. trilogy/core/processing/node_generators/constant_node.py +38 -0
  67. trilogy/core/processing/node_generators/filter_node.py +315 -0
  68. trilogy/core/processing/node_generators/group_node.py +213 -0
  69. trilogy/core/processing/node_generators/group_to_node.py +117 -0
  70. trilogy/core/processing/node_generators/multiselect_node.py +205 -0
  71. trilogy/core/processing/node_generators/node_merge_node.py +653 -0
  72. trilogy/core/processing/node_generators/recursive_node.py +88 -0
  73. trilogy/core/processing/node_generators/rowset_node.py +165 -0
  74. trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  75. trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
  76. trilogy/core/processing/node_generators/select_merge_node.py +748 -0
  77. trilogy/core/processing/node_generators/select_node.py +95 -0
  78. trilogy/core/processing/node_generators/synonym_node.py +98 -0
  79. trilogy/core/processing/node_generators/union_node.py +91 -0
  80. trilogy/core/processing/node_generators/unnest_node.py +182 -0
  81. trilogy/core/processing/node_generators/window_node.py +201 -0
  82. trilogy/core/processing/nodes/README.md +28 -0
  83. trilogy/core/processing/nodes/__init__.py +179 -0
  84. trilogy/core/processing/nodes/base_node.py +519 -0
  85. trilogy/core/processing/nodes/filter_node.py +75 -0
  86. trilogy/core/processing/nodes/group_node.py +194 -0
  87. trilogy/core/processing/nodes/merge_node.py +420 -0
  88. trilogy/core/processing/nodes/recursive_node.py +46 -0
  89. trilogy/core/processing/nodes/select_node_v2.py +242 -0
  90. trilogy/core/processing/nodes/union_node.py +53 -0
  91. trilogy/core/processing/nodes/unnest_node.py +62 -0
  92. trilogy/core/processing/nodes/window_node.py +56 -0
  93. trilogy/core/processing/utility.py +823 -0
  94. trilogy/core/query_processor.py +596 -0
  95. trilogy/core/statements/README.md +35 -0
  96. trilogy/core/statements/__init__.py +0 -0
  97. trilogy/core/statements/author.py +536 -0
  98. trilogy/core/statements/build.py +0 -0
  99. trilogy/core/statements/common.py +20 -0
  100. trilogy/core/statements/execute.py +155 -0
  101. trilogy/core/table_processor.py +66 -0
  102. trilogy/core/utility.py +8 -0
  103. trilogy/core/validation/README.md +46 -0
  104. trilogy/core/validation/__init__.py +0 -0
  105. trilogy/core/validation/common.py +161 -0
  106. trilogy/core/validation/concept.py +146 -0
  107. trilogy/core/validation/datasource.py +227 -0
  108. trilogy/core/validation/environment.py +73 -0
  109. trilogy/core/validation/fix.py +256 -0
  110. trilogy/dialect/__init__.py +32 -0
  111. trilogy/dialect/base.py +1392 -0
  112. trilogy/dialect/bigquery.py +308 -0
  113. trilogy/dialect/common.py +147 -0
  114. trilogy/dialect/config.py +144 -0
  115. trilogy/dialect/dataframe.py +50 -0
  116. trilogy/dialect/duckdb.py +231 -0
  117. trilogy/dialect/enums.py +147 -0
  118. trilogy/dialect/metadata.py +173 -0
  119. trilogy/dialect/mock.py +190 -0
  120. trilogy/dialect/postgres.py +117 -0
  121. trilogy/dialect/presto.py +110 -0
  122. trilogy/dialect/results.py +89 -0
  123. trilogy/dialect/snowflake.py +129 -0
  124. trilogy/dialect/sql_server.py +137 -0
  125. trilogy/engine.py +48 -0
  126. trilogy/execution/config.py +75 -0
  127. trilogy/executor.py +568 -0
  128. trilogy/hooks/__init__.py +4 -0
  129. trilogy/hooks/base_hook.py +40 -0
  130. trilogy/hooks/graph_hook.py +139 -0
  131. trilogy/hooks/query_debugger.py +166 -0
  132. trilogy/metadata/__init__.py +0 -0
  133. trilogy/parser.py +10 -0
  134. trilogy/parsing/README.md +21 -0
  135. trilogy/parsing/__init__.py +0 -0
  136. trilogy/parsing/common.py +1069 -0
  137. trilogy/parsing/config.py +5 -0
  138. trilogy/parsing/exceptions.py +8 -0
  139. trilogy/parsing/helpers.py +1 -0
  140. trilogy/parsing/parse_engine.py +2813 -0
  141. trilogy/parsing/render.py +769 -0
  142. trilogy/parsing/trilogy.lark +540 -0
  143. trilogy/py.typed +0 -0
  144. trilogy/render.py +42 -0
  145. trilogy/scripts/README.md +9 -0
  146. trilogy/scripts/__init__.py +0 -0
  147. trilogy/scripts/agent.py +41 -0
  148. trilogy/scripts/agent_info.py +303 -0
  149. trilogy/scripts/common.py +355 -0
  150. trilogy/scripts/dependency/Cargo.lock +617 -0
  151. trilogy/scripts/dependency/Cargo.toml +39 -0
  152. trilogy/scripts/dependency/README.md +131 -0
  153. trilogy/scripts/dependency/build.sh +25 -0
  154. trilogy/scripts/dependency/src/directory_resolver.rs +177 -0
  155. trilogy/scripts/dependency/src/lib.rs +16 -0
  156. trilogy/scripts/dependency/src/main.rs +770 -0
  157. trilogy/scripts/dependency/src/parser.rs +435 -0
  158. trilogy/scripts/dependency/src/preql.pest +208 -0
  159. trilogy/scripts/dependency/src/python_bindings.rs +303 -0
  160. trilogy/scripts/dependency/src/resolver.rs +716 -0
  161. trilogy/scripts/dependency/tests/base.preql +3 -0
  162. trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
  163. trilogy/scripts/dependency/tests/customer.preql +6 -0
  164. trilogy/scripts/dependency/tests/main.preql +9 -0
  165. trilogy/scripts/dependency/tests/orders.preql +7 -0
  166. trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
  167. trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
  168. trilogy/scripts/dependency.py +323 -0
  169. trilogy/scripts/display.py +512 -0
  170. trilogy/scripts/environment.py +46 -0
  171. trilogy/scripts/fmt.py +32 -0
  172. trilogy/scripts/ingest.py +471 -0
  173. trilogy/scripts/ingest_helpers/__init__.py +1 -0
  174. trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
  175. trilogy/scripts/ingest_helpers/formatting.py +93 -0
  176. trilogy/scripts/ingest_helpers/typing.py +161 -0
  177. trilogy/scripts/init.py +105 -0
  178. trilogy/scripts/parallel_execution.py +713 -0
  179. trilogy/scripts/plan.py +189 -0
  180. trilogy/scripts/run.py +63 -0
  181. trilogy/scripts/serve.py +140 -0
  182. trilogy/scripts/serve_helpers/__init__.py +41 -0
  183. trilogy/scripts/serve_helpers/file_discovery.py +142 -0
  184. trilogy/scripts/serve_helpers/index_generation.py +206 -0
  185. trilogy/scripts/serve_helpers/models.py +38 -0
  186. trilogy/scripts/single_execution.py +131 -0
  187. trilogy/scripts/testing.py +119 -0
  188. trilogy/scripts/trilogy.py +68 -0
  189. trilogy/std/__init__.py +0 -0
  190. trilogy/std/color.preql +3 -0
  191. trilogy/std/date.preql +13 -0
  192. trilogy/std/display.preql +18 -0
  193. trilogy/std/geography.preql +22 -0
  194. trilogy/std/metric.preql +15 -0
  195. trilogy/std/money.preql +67 -0
  196. trilogy/std/net.preql +14 -0
  197. trilogy/std/ranking.preql +7 -0
  198. trilogy/std/report.preql +5 -0
  199. trilogy/std/semantic.preql +6 -0
  200. trilogy/utility.py +34 -0
@@ -0,0 +1,508 @@
+ from dataclasses import dataclass
+ from typing import List, Optional
+
+ from trilogy.constants import logger
+ from trilogy.core.enums import Derivation, Granularity
+ from trilogy.core.env_processor import generate_graph
+ from trilogy.core.exceptions import UnresolvableQueryException
+ from trilogy.core.graph_models import ReferenceGraph
+ from trilogy.core.models.author import (
+     UndefinedConcept,
+ )
+ from trilogy.core.models.build import (
+     BuildConcept,
+     BuildWhereClause,
+ )
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.processing.constants import ROOT_DERIVATIONS, SKIPPED_DERIVATIONS
+ from trilogy.core.processing.discovery_node_factory import generate_node
+ from trilogy.core.processing.discovery_utility import (
+     LOGGER_PREFIX,
+     depth_to_prefix,
+     get_loop_iteration_targets,
+     group_if_required_v2,
+ )
+ from trilogy.core.processing.discovery_validation import (
+     ValidationResult,
+     validate_stack,
+ )
+ from trilogy.core.processing.nodes import (
+     History,
+     MergeNode,
+     StrategyNode,
+ )
+ from trilogy.utility import unique
+
+
+ def append_existence_check(
+     node: StrategyNode,
+     environment: BuildEnvironment,
+     graph: ReferenceGraph,
+     where: BuildWhereClause,
+     history: History,
+ ):
+     # if we have a where clause doing an existence check,
+     # treat that as a separate subquery
+     if where.existence_arguments:
+         for subselect in where.existence_arguments:
+             if not subselect:
+                 continue
+             if all([x.address in node.input_concepts for x in subselect]):
+                 logger.info(
+                     f"{LOGGER_PREFIX} existence clause inputs already found {[str(c) for c in subselect]}"
+                 )
+                 continue
+             logger.info(
+                 f"{LOGGER_PREFIX} fetching existence clause inputs {[str(c) for c in subselect]}"
+             )
+             parent = source_query_concepts(
+                 [*subselect],
+                 history=history,
+                 environment=environment,
+                 g=graph,
+             )
+             assert parent, "Could not resolve existence clause"
+             node.add_parents([parent])
+             logger.info(f"{LOGGER_PREFIX} found {[str(c) for c in subselect]}")
+             node.add_existence_concepts([*subselect])
+
+
+ def search_concepts(
+     mandatory_list: List[BuildConcept],
+     history: History,
+     environment: BuildEnvironment,
+     depth: int,
+     g: ReferenceGraph,
+     accept_partial: bool = False,
+     conditions: BuildWhereClause | None = None,
+ ) -> StrategyNode | None:
+     hist = history.get_history(
+         search=mandatory_list, accept_partial=accept_partial, conditions=conditions
+     )
+     if hist is not False:
+         logger.info(
+             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Returning search node from history ({'exists' if hist is not None else 'does not exist'}) for {[c.address for c in mandatory_list]} with accept_partial {accept_partial}"
+         )
+         assert not isinstance(hist, bool)
+         return hist
+
+     result = _search_concepts(
+         mandatory_list,
+         environment,
+         depth=depth,
+         g=g,
+         accept_partial=accept_partial,
+         history=history,
+         conditions=conditions,
+     )
+     # a node may be mutated after being cached; always store a copy
+     history.search_to_history(
+         mandatory_list,
+         accept_partial,
+         result.copy() if result else None,
+         conditions=conditions,
+     )
+     return result
+
+
+ @dataclass
+ class LoopContext:
+     mandatory_list: List[BuildConcept]
+     environment: BuildEnvironment
+     depth: int
+     g: ReferenceGraph
+     history: History
+     attempted: set[str]
+     found: set[str]
+     skip: set[str]
+     all_mandatory: set[str]
+     original_mandatory: List[BuildConcept]
+     completion_mandatory: List[BuildConcept]
+     stack: List[StrategyNode]
+     complete: ValidationResult = ValidationResult.INCOMPLETE
+     accept_partial: bool = False
+     must_evaluate_condition_on_this_level_not_push_down: bool = False
+     conditions: BuildWhereClause | None = None
+
+     @property
+     def incomplete(self) -> bool:
+         return self.attempted != self.all_mandatory
+
+
+ def initialize_loop_context(
+     mandatory_list: List[BuildConcept],
+     environment: BuildEnvironment,
+     depth: int,
+     g: ReferenceGraph,
+     history: History,
+     accept_partial: bool = False,
+     conditions: BuildWhereClause | None = None,
+ ):
+     # these are the concepts we need in the output projection
+     mandatory_list = unique(mandatory_list, "address")
+     # cache our values before any filter injection
+     original_mandatory = [*mandatory_list]
+     for x in mandatory_list:
+         if isinstance(x, UndefinedConcept):
+             raise SyntaxError(f"Undefined concept {x.address}")
+     all_mandatory = set(c.address for c in mandatory_list)
+
+     must_evaluate_condition_on_this_level_not_push_down = False
+
+     # if we have a filter, we may need to get more values to support that.
+     if conditions:
+         completion_mandatory = unique(
+             mandatory_list + list(conditions.row_arguments), "address"
+         )
+         # if anything we need to get is in the filter set and it's a computed value,
+         # we need to get _everything_ in this loop
+         required_filters = [
+             x
+             for x in mandatory_list
+             if x.derivation not in ROOT_DERIVATIONS
+             and not (
+                 x.derivation == Derivation.AGGREGATE
+                 and x.granularity == Granularity.SINGLE_ROW
+             )
+             and x.address in conditions.row_arguments
+         ]
+         if any(required_filters):
+             logger.info(
+                 f"{depth_to_prefix(depth)}{LOGGER_PREFIX} derived condition row inputs {[x.address for x in required_filters]} present in mandatory list, forcing condition evaluation at this level."
+             )
+             mandatory_list = completion_mandatory
+             all_mandatory = set(c.address for c in completion_mandatory)
+             must_evaluate_condition_on_this_level_not_push_down = True
+         else:
+             logger.info(
+                 f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Do not need to evaluate conditions yet."
+             )
+     else:
+         completion_mandatory = mandatory_list
+     logger.info(
+         f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Initialized loop context with mandatory list {[c.address for c in mandatory_list]} and completion mandatory {[c.address for c in completion_mandatory]}"
+     )
+     return LoopContext(
+         mandatory_list=mandatory_list,
+         environment=environment,
+         depth=depth,
+         g=g,
+         history=history,
+         attempted=set(),
+         found=set(),
+         skip=set(),
+         all_mandatory=all_mandatory,
+         original_mandatory=original_mandatory,
+         completion_mandatory=completion_mandatory,
+         stack=[],
+         complete=ValidationResult.INCOMPLETE,
+         accept_partial=accept_partial,
+         must_evaluate_condition_on_this_level_not_push_down=must_evaluate_condition_on_this_level_not_push_down,
+         conditions=conditions,
+     )
+
+
+ def check_for_early_exit(
+     complete: ValidationResult,
+     found: set[str],
+     partial: set[str],
+     missing: set[str],
+     context: LoopContext,
+     priority_concept: BuildConcept,
+ ) -> bool:
+     if complete == ValidationResult.INCOMPLETE_CONDITION:
+         cond_dict = {str(node): node.preexisting_conditions for node in context.stack}
+         for node in context.stack:
+             logger.info(
+                 f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Node {node} has conditions {node.preexisting_conditions} and {node.conditions}"
+             )
+         raise SyntaxError(f"Have {cond_dict} and need {str(context.conditions)}")
+     # we can only early exit if we have a complete stack
+     # and we are not looking for more non-partial sources
+     if complete == ValidationResult.COMPLETE and (
+         not context.accept_partial or (context.accept_partial and not partial)
+     ):
+         logger.info(
+             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} breaking loop, complete"
+         )
+         return True
+     elif complete == ValidationResult.COMPLETE and context.accept_partial and partial:
+         if len(context.attempted) == len(context.mandatory_list):
+             logger.info(
+                 f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as we have attempted all nodes"
+             )
+             return True
+         elif all(
+             [
+                 x.address in found and x.address not in partial
+                 for x in context.mandatory_list
+             ]
+         ):
+             logger.info(
+                 f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as we have found all mandatory nodes without partials"
+             )
+             return True
+         logger.info(
+             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found complete stack with partials {partial}, continuing search, attempted {context.attempted} of total {len(context.mandatory_list)}."
+         )
+     else:
+         logger.info(
+             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Not complete (missing {missing}), continuing search"
+         )
+         # if we have attempted a root node, we've tried them all.
+         if priority_concept.derivation == Derivation.ROOT:
+             logger.info(
+                 f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as attempted root with no results"
+             )
+             return True
+     return False
+
+
+ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> StrategyNode:
+     condition_required = True
+     non_virtual = [c for c in context.completion_mandatory if c.address not in virtual]
+     non_virtual_different = len(context.completion_mandatory) != len(
+         context.original_mandatory
+     )
+     non_virtual_difference_values = set(
+         [x.address for x in context.completion_mandatory]
+     ).difference(set([x.address for x in context.original_mandatory]))
+     if not context.conditions:
+         condition_required = False
+         non_virtual = [c for c in context.mandatory_list if c.address not in virtual]
+
+     elif all(
+         [
+             x.preexisting_conditions == context.conditions.conditional
+             for x in context.stack
+         ]
+     ):
+         condition_required = False
+         non_virtual = [c for c in context.mandatory_list if c.address not in virtual]
+
+     if context.conditions and not condition_required:
+         parent_map = {
+             str(x): x.preexisting_conditions == context.conditions.conditional
+             for x in context.stack
+         }
+         logger.info(
+             f"Condition {context.conditions} not required, parents included filtering! {parent_map}"
+         )
+
+     if len(context.stack) == 1:
+         output: StrategyNode = context.stack[0]
+         if non_virtual_different:
+             logger.info(
+                 f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found added non-virtual output concepts ({non_virtual_difference_values})"
+             )
+     else:
+         logger.info(
+             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} wrapping multiple parent nodes {[type(x) for x in context.stack]} in merge node"
+         )
+         output = MergeNode(
+             input_concepts=non_virtual,
+             output_concepts=non_virtual,
+             environment=context.environment,
+             parents=context.stack,
+             depth=context.depth,
+         )
+
+     # ensure we can resolve our final merge
+     output.resolve()
+     if condition_required and context.conditions:
+         output.add_condition(context.conditions.conditional)
+         if context.conditions.existence_arguments:
+             append_existence_check(
+                 output,
+                 context.environment,
+                 context.g,
+                 where=context.conditions,
+                 history=context.history,
+             )
+     elif context.conditions:
+         output.preexisting_conditions = context.conditions.conditional
+     logger.info(
+         f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node output {[x.address for x in output.usable_outputs]} partial {[c.address for c in output.partial_concepts or []]} with {context.conditions}"
+     )
+
+     if condition_required and context.conditions and non_virtual_different:
+         logger.info(
+             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
+         )
+         return group_if_required_v2(
+             output,
+             context.original_mandatory,
+             context.environment,
+             non_virtual_difference_values,
+             depth=context.depth,
+         )
+
+     return group_if_required_v2(
+         output,
+         context.original_mandatory,
+         context.environment,
+         non_virtual_difference_values,
+         depth=context.depth,
+     )
+
+
+ def _search_concepts(
+     mandatory_list: List[BuildConcept],
+     environment: BuildEnvironment,
+     depth: int,
+     g: ReferenceGraph,
+     history: History,
+     accept_partial: bool = False,
+     conditions: BuildWhereClause | None = None,
+ ) -> StrategyNode | None:
+     # check for direct materialization first
+     candidate = history.gen_select_node(
+         mandatory_list,
+         environment,
+         g,
+         depth + 1,
+         fail_if_not_found=False,
+         accept_partial=accept_partial,
+         conditions=conditions,
+     )
+
+     # if we get a candidate, return it directly
+     if candidate:
+         return candidate
+     context = initialize_loop_context(
+         mandatory_list=mandatory_list,
+         environment=environment,
+         depth=depth,
+         g=g,
+         history=history,
+         accept_partial=accept_partial,
+         conditions=conditions,
+     )
+     partial: set[str] = set()
+     virtual: set[str] = set()
+     complete = ValidationResult.INCOMPLETE
+     while context.incomplete:
+         priority_concept, candidate_list, local_conditions = get_loop_iteration_targets(
+             mandatory=context.mandatory_list,
+             conditions=context.conditions,
+             attempted=context.attempted,
+             force_conditions=context.must_evaluate_condition_on_this_level_not_push_down,
+             found=context.found,
+             partial=partial,
+             depth=depth,
+             materialized_canonical=(
+                 environment.non_partial_materialized_canonical_concepts
+                 if not accept_partial
+                 else environment.materialized_canonical_concepts
+             ),
+         )
+         logger.info(
+             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} priority concept is {str(priority_concept)} derivation {priority_concept.derivation} granularity {priority_concept.granularity} with conditions {local_conditions}"
+         )
+
+         logger.info(
+             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Beginning sourcing loop for {priority_concept.address}, accept_partial {accept_partial}, optional {[v.address for v in candidate_list]}, exhausted {[c for c in context.skip]}"
+         )
+         node = generate_node(
+             priority_concept,
+             candidate_list,
+             environment,
+             g,
+             depth,
+             source_concepts=search_concepts,
+             accept_partial=accept_partial,
+             history=history,
+             conditions=local_conditions,
+         )
+         if node:
+             context.stack.append(node)
+             node.resolve()
+             # these concepts should not be attempted to be sourced again,
+             # as fetching them requires operating on a subset of concepts
+             if priority_concept.derivation in SKIPPED_DERIVATIONS:
+                 context.skip.add(priority_concept.address)
+         context.attempted.add(priority_concept.address)
+         complete, found_c, missing_c, partial, virtual = validate_stack(
+             environment,
+             context.stack,
+             context.mandatory_list,
+             context.completion_mandatory,
+             conditions=context.conditions,
+             accept_partial=accept_partial,
+         )
+         # assign
+         context.found = found_c
+         early_exit = check_for_early_exit(
+             complete, found_c, partial, missing_c, context, priority_concept
+         )
+         if early_exit:
+             break
+
+     logger.info(
+         f"{depth_to_prefix(depth)}{LOGGER_PREFIX} finished sourcing loop (complete: {complete}), have {context.found} from {[n for n in context.stack]} (missing {context.all_mandatory - context.found}), attempted {context.attempted}, virtual {virtual}"
+     )
+     if complete == ValidationResult.COMPLETE:
+         return generate_loop_completion(context, virtual)
+
+     # if we can't find it after expanding to a merge, then
+     # accept partials in join paths
+     if not accept_partial:
+         logger.info(
+             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Stack is not a connected graph; flag for accepting partial addresses is {accept_partial}, changing flag"
+         )
+         partial_search = search_concepts(
+             # use the original mandatory list
+             mandatory_list=mandatory_list,
+             environment=environment,
+             depth=depth,
+             g=g,
+             accept_partial=True,
+             history=history,
+             conditions=conditions,
+         )
+         if partial_search:
+             logger.info(
+                 f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Found {[c.address for c in mandatory_list]} by accepting partials"
+             )
+             return partial_search
+     logger.error(
+         f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Could not resolve concepts {[c.address for c in mandatory_list]}, network outcome was {complete}, missing {context.all_mandatory - context.found}"
+     )
+
+     return None
+
+
+ def source_query_concepts(
+     output_concepts: List[BuildConcept],
+     history: History,
+     environment: BuildEnvironment,
+     g: Optional[ReferenceGraph] = None,
+     conditions: Optional[BuildWhereClause] = None,
+ ):
+     if not output_concepts:
+         raise ValueError(f"No output concepts provided {output_concepts}")
+     if not g:
+         g = generate_graph(environment)
+
+     root = search_concepts(
+         mandatory_list=output_concepts,
+         environment=environment,
+         g=g,
+         depth=0,
+         history=history,
+         conditions=conditions,
+     )
+     if not root:
+         error_strings = [
+             f"{c.address}<{c.purpose}>{c.derivation}>" for c in output_concepts
+         ]
+         raise UnresolvableQueryException(
+             f"Could not resolve connections for query with output {error_strings} from current model."
+         )
+     final = [x for x in root.output_concepts if x.address not in root.hidden_concepts]
+     logger.info(
+         f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
+     )
+     return group_if_required_v2(root, output_concepts, environment, depth=0)
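
The hunk above (trilogy/core/processing/concept_strategies_v3.py) implements a recursive source-discovery search: source_query_concepts is the entry point, search_concepts memoizes results via History, and _search_concepts runs the per-concept sourcing loop. As a minimal, hypothetical sketch of how a caller might drive it (only source_query_concepts and its signature come from the hunk above; the environment and history objects are assumed to be constructed elsewhere):

    # A minimal sketch, not from the package: assumes a BuildEnvironment and a
    # History instance are built elsewhere (their constructors are not shown
    # in this diff).
    from trilogy.core.exceptions import UnresolvableQueryException
    from trilogy.core.processing.concept_strategies_v3 import source_query_concepts

    def plan_outputs(environment, history, output_concepts, conditions=None):
        # Resolves a StrategyNode tree for the requested output concepts;
        # raises UnresolvableQueryException if no connected source graph exists.
        try:
            return source_query_concepts(
                output_concepts,          # List[BuildConcept] to project
                history=history,          # caches prior search results
                environment=environment,  # BuildEnvironment with datasources
                conditions=conditions,    # optional BuildWhereClause filter
            )
        except UnresolvableQueryException:
            return None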
@@ -0,0 +1,15 @@
+ from trilogy.core.enums import Derivation
+
+ ROOT_DERIVATIONS: list[Derivation] = [Derivation.ROOT, Derivation.CONSTANT]
+ SKIPPED_DERIVATIONS: list[Derivation] = [
+     Derivation.AGGREGATE,
+     Derivation.FILTER,
+     Derivation.WINDOW,
+     Derivation.UNNEST,
+     Derivation.RECURSIVE,
+     Derivation.ROWSET,
+     Derivation.BASIC,
+     Derivation.GROUP_TO,
+     Derivation.MULTISELECT,
+     Derivation.UNION,
+ ]
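
These two lists (trilogy/core/processing/constants.py) drive branch decisions in the discovery code above: ROOT_DERIVATIONS marks concepts readable directly from sources (they never force condition evaluation in initialize_loop_context), while SKIPPED_DERIVATIONS marks computed concepts that _search_concepts sources once and then excludes from further attempts. A rough illustration of that split (the helper below is hypothetical, not part of the package; only the two constant lists come from it):

    # Hypothetical helper illustrating how the discovery loop treats
    # derivations; only ROOT_DERIVATIONS and SKIPPED_DERIVATIONS are real.
    from trilogy.core.enums import Derivation
    from trilogy.core.processing.constants import (
        ROOT_DERIVATIONS,
        SKIPPED_DERIVATIONS,
    )

    def sourcing_strategy(derivation: Derivation) -> str:
        if derivation in ROOT_DERIVATIONS:
            # plain or constant concepts: resolvable straight from a datasource
            return "read-from-source"
        if derivation in SKIPPED_DERIVATIONS:
            # computed concepts (aggregates, filters, windows, ...): generate
            # a node once, then skip re-sourcing, since that would re-run the
            # work on a subset of concepts
            return "source-once-then-skip"
        return "other"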