pytrilogy 0.0.3.55__py3-none-any.whl → 0.0.3.57__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic. Click here for more details.

Files changed (39) hide show
  1. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/METADATA +1 -1
  2. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/RECORD +39 -34
  3. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/WHEEL +1 -1
  4. trilogy/__init__.py +1 -1
  5. trilogy/authoring/__init__.py +12 -1
  6. trilogy/core/enums.py +1 -0
  7. trilogy/core/models/author.py +6 -4
  8. trilogy/core/models/execute.py +4 -1
  9. trilogy/core/optimization.py +4 -4
  10. trilogy/core/processing/concept_strategies_v3.py +324 -895
  11. trilogy/core/processing/discovery_loop.py +0 -0
  12. trilogy/core/processing/discovery_node_factory.py +475 -0
  13. trilogy/core/processing/discovery_utility.py +123 -0
  14. trilogy/core/processing/discovery_validation.py +155 -0
  15. trilogy/core/processing/node_generators/basic_node.py +29 -11
  16. trilogy/core/processing/node_generators/node_merge_node.py +1 -1
  17. trilogy/core/processing/node_generators/select_node.py +6 -8
  18. trilogy/core/processing/node_generators/synonym_node.py +2 -1
  19. trilogy/core/processing/node_generators/unnest_node.py +7 -1
  20. trilogy/core/processing/nodes/__init__.py +2 -4
  21. trilogy/core/processing/nodes/base_node.py +0 -13
  22. trilogy/core/processing/nodes/group_node.py +1 -1
  23. trilogy/core/processing/utility.py +38 -11
  24. trilogy/core/query_processor.py +3 -3
  25. trilogy/core/statements/author.py +6 -2
  26. trilogy/core/statements/execute.py +3 -2
  27. trilogy/dialect/base.py +3 -30
  28. trilogy/dialect/snowflake.py +1 -1
  29. trilogy/executor.py +13 -4
  30. trilogy/parsing/common.py +1 -3
  31. trilogy/parsing/parse_engine.py +14 -2
  32. trilogy/parsing/trilogy.lark +1 -1
  33. trilogy/std/date.preql +3 -1
  34. trilogy/std/geography.preql +4 -0
  35. trilogy/std/money.preql +65 -4
  36. trilogy/std/net.preql +8 -0
  37. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/entry_points.txt +0 -0
  38. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/licenses/LICENSE.md +0 -0
  39. {pytrilogy-0.0.3.55.dist-info → pytrilogy-0.0.3.57.dist-info}/top_level.txt +0 -0
File without changes
@@ -0,0 +1,475 @@
1
+ from dataclasses import dataclass
2
+ from typing import List, Optional, Protocol, Union
3
+
4
+ from trilogy.constants import logger
5
+ from trilogy.core.enums import Derivation, Granularity
6
+ from trilogy.core.graph_models import ReferenceGraph
7
+ from trilogy.core.models.build import (
8
+ BuildConcept,
9
+ BuildWhereClause,
10
+ )
11
+ from trilogy.core.models.build_environment import BuildEnvironment
12
+ from trilogy.core.processing.discovery_utility import LOGGER_PREFIX, depth_to_prefix
13
+ from trilogy.core.processing.node_generators import (
14
+ gen_basic_node,
15
+ gen_filter_node,
16
+ gen_group_node,
17
+ gen_group_to_node,
18
+ gen_merge_node,
19
+ gen_multiselect_node,
20
+ gen_recursive_node,
21
+ gen_rowset_node,
22
+ gen_synonym_node,
23
+ gen_union_node,
24
+ gen_unnest_node,
25
+ gen_window_node,
26
+ )
27
+ from trilogy.core.processing.nodes import (
28
+ History,
29
+ StrategyNode,
30
+ )
31
+
32
+
33
class SearchConceptsType(Protocol):
    """Structural type of the discovery search entry point.

    Any callable that resolves a mandatory list of concepts into a
    StrategyNode (or None when resolution fails) satisfies this protocol;
    node generators receive it as ``source_concepts`` so they can recurse
    without a direct dependency on the search implementation.
    """

    def __call__(
        self,
        mandatory_list: List[BuildConcept],
        history: History,
        environment: BuildEnvironment,
        depth: int,
        g: ReferenceGraph,
        accept_partial: bool = False,
        conditions: Optional[BuildWhereClause] = None,
    ) -> Union[StrategyNode, None]: ...
44
+
45
+
46
@dataclass
class NodeGenerationContext:
    """Bundle of the shared arguments threaded through node generation.

    Groups the target concept, its optional companions, and the search
    machinery (environment, graph, recursion depth, search callable,
    history) so individual generator functions take a single parameter.
    """

    concept: BuildConcept
    local_optional: List[BuildConcept]
    environment: BuildEnvironment
    g: ReferenceGraph
    depth: int
    source_concepts: SearchConceptsType
    history: History
    accept_partial: bool = False
    conditions: Optional[BuildWhereClause] = None

    @property
    def next_depth(self) -> int:
        """Depth to pass to any recursive/child search."""
        return 1 + self.depth

    def log_generation(self, node_type: str, extra_info: str = "") -> None:
        """Emit one uniformly formatted log line for a generation attempt."""
        optional_addresses = [x.address for x in self.local_optional]
        pieces = [
            f"for {self.concept.address}, generating {node_type} node with optional {optional_addresses}"
        ]
        if extra_info:
            pieces.append(f" and {extra_info}")
        base_msg = "".join(pieces)
        logger.info(f"{depth_to_prefix(self.depth)}{LOGGER_PREFIX} {base_msg}")
73
+
74
+
75
def restrict_node_outputs_targets(
    node: StrategyNode, targets: list[BuildConcept], depth: int
) -> list[BuildConcept]:
    """Trim a node's outputs to the requested targets.

    Mutates ``node`` so it outputs only target concepts (appending any
    target the node did not already resolve) and returns the concepts
    that were produced but not requested.
    """
    resolved = node.resolve()
    wanted = {t.address for t in targets}

    # Partition the resolved outputs by whether they were asked for.
    extra = [c for c in resolved.output_concepts if c.address not in wanted]
    base = [c for c in resolved.output_concepts if c.address in wanted]

    logger.info(
        f"{depth_to_prefix(depth)}{LOGGER_PREFIX} reducing final outputs, "
        f"was {[c.address for c in resolved.output_concepts]} "
        f"with extra {[c.address for c in extra]}, remaining {base}"
    )

    # Ensure every requested target is present, even if the node did not
    # resolve it directly.
    present = {c.address for c in base}
    for target in targets:
        if target.address not in present:
            base.append(target)
            present.add(target.address)

    node.set_output_concepts(base)
    return extra
103
+
104
+
105
+ # Simple factory functions for basic derivation types
106
def _generate_window_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a window-function-derived concept."""
    ctx.log_generation("window")
    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_window_node(ctx.concept, ctx.local_optional, **shared)
118
+
119
+
120
def _generate_filter_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a filter-derived concept."""
    ctx.log_generation("filter")
    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_filter_node(ctx.concept, ctx.local_optional, **shared)
132
+
133
+
134
def _generate_unnest_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for an unnest-derived concept."""
    ctx.log_generation("unnest", f"condition {ctx.conditions}")
    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_unnest_node(ctx.concept, ctx.local_optional, **shared)
146
+
147
+
148
def _generate_recursive_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a recursive-derived concept."""
    ctx.log_generation("recursive", f"condition {ctx.conditions}")
    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_recursive_node(ctx.concept, ctx.local_optional, **shared)
160
+
161
+
162
def _generate_union_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a union-derived concept."""
    ctx.log_generation("union", f"condition {ctx.conditions}")
    # gen_union_node takes its search machinery positionally.
    positional = (
        ctx.concept,
        ctx.local_optional,
        ctx.environment,
        ctx.g,
        ctx.next_depth,
        ctx.source_concepts,
        ctx.history,
    )
    return gen_union_node(*positional, conditions=ctx.conditions)
174
+
175
+
176
def _generate_aggregate_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for an aggregate-derived concept."""
    # Drop single-row-granularity optionals (e.g. constants): joining them
    # into an aggregate could multiply rows.
    agg_optional = [
        c for c in ctx.local_optional if c.granularity != Granularity.SINGLE_ROW
    ]

    logger.info(
        f"{depth_to_prefix(ctx.depth)}{LOGGER_PREFIX} "
        f"for {ctx.concept.address}, generating aggregate node with {agg_optional}"
    )

    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_group_node(ctx.concept, agg_optional, **shared)
197
+
198
+
199
def _generate_rowset_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a rowset-derived concept."""
    ctx.log_generation("rowset")
    # gen_rowset_node takes its search machinery positionally.
    positional = (
        ctx.concept,
        ctx.local_optional,
        ctx.environment,
        ctx.g,
        ctx.next_depth,
        ctx.source_concepts,
        ctx.history,
    )
    return gen_rowset_node(*positional, conditions=ctx.conditions)
211
+
212
+
213
def _generate_multiselect_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a multiselect-derived concept."""
    ctx.log_generation("multiselect")
    # gen_multiselect_node takes its search machinery positionally.
    positional = (
        ctx.concept,
        ctx.local_optional,
        ctx.environment,
        ctx.g,
        ctx.next_depth,
        ctx.source_concepts,
        ctx.history,
    )
    return gen_multiselect_node(*positional, conditions=ctx.conditions)
225
+
226
+
227
def _generate_group_to_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node that groups a concept to a target grain."""
    ctx.log_generation("group to grain")
    # gen_group_to_node takes its search machinery positionally.
    positional = (
        ctx.concept,
        ctx.local_optional,
        ctx.environment,
        ctx.g,
        ctx.next_depth,
        ctx.source_concepts,
        ctx.history,
    )
    return gen_group_to_node(*positional, conditions=ctx.conditions)
239
+
240
+
241
def _generate_basic_node(ctx: NodeGenerationContext) -> StrategyNode | None:
    """Build a strategy node for a basic (simple transform) derivation."""
    ctx.log_generation("basic")
    shared = dict(
        history=ctx.history,
        environment=ctx.environment,
        g=ctx.g,
        depth=ctx.next_depth,
        source_concepts=ctx.source_concepts,
        conditions=ctx.conditions,
    )
    return gen_basic_node(ctx.concept, ctx.local_optional, **shared)
253
+
254
+
255
class RootNodeHandler:
    """Handles complex root node generation logic.

    Resolution strategy for a ROOT-derivation concept and its optionals:
    if any optional still requires derivation, recurse with the full set as
    mandatory; otherwise try synonym substitution, then merge-node expansion
    (first strict, then accepting partial sources). Returns None when no
    strategy succeeds.
    """

    def __init__(self, context: NodeGenerationContext):
        self.ctx = context

    def generate(self) -> Optional[StrategyNode]:
        """Entry point: resolve the context's concept plus its optionals."""
        self.ctx.log_generation("select", "including condition inputs")

        # Everything we must ultimately output: the target plus optionals.
        root_targets = [self.ctx.concept] + self.ctx.local_optional

        if self._has_non_root_concepts():
            return self._handle_non_root_concepts(root_targets)

        return self._resolve_root_concepts(root_targets)

    def _has_non_root_concepts(self) -> bool:
        # True when any optional still needs derivation work
        # (anything other than a plain root or constant).
        return any(
            x.derivation not in (Derivation.ROOT, Derivation.CONSTANT)
            for x in self.ctx.local_optional
        )

    def _handle_non_root_concepts(
        self, root_targets: List[BuildConcept]
    ) -> Optional[StrategyNode]:
        """Recurse with all targets mandatory so derived optionals expand first."""
        non_root = [
            x.address
            for x in self.ctx.local_optional
            if x.derivation not in (Derivation.ROOT, Derivation.CONSTANT)
        ]

        logger.info(
            f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
            f"including any filters, there are non-root concepts we should expand first: {non_root}. "
            f"Recursing with all of these as mandatory"
        )

        # Record the attempt so the recursion can detect re-entry.
        self.ctx.history.log_start(
            root_targets,
            accept_partial=self.ctx.accept_partial,
            conditions=self.ctx.conditions,
        )

        return self.ctx.source_concepts(
            mandatory_list=root_targets,
            environment=self.ctx.environment,
            g=self.ctx.g,
            depth=self.ctx.next_depth,
            accept_partial=self.ctx.accept_partial,
            history=self.ctx.history,
        )

    def _resolve_root_concepts(
        self, root_targets: List[BuildConcept]
    ) -> Optional[StrategyNode]:
        """Try synonym resolution, then merge expansion; None if both fail."""
        synonym_node = self._try_synonym_resolution(root_targets)
        if synonym_node:
            logger.info(
                f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
                f"resolved root concepts through synonyms"
            )
            return synonym_node
        expanded_node = self._try_merge_expansion(root_targets)
        if expanded_node:
            return expanded_node

        return None

    def _try_merge_expansion(
        self, root_targets: List[BuildConcept]
    ) -> Optional[StrategyNode]:
        """Attempt to connect targets by injecting extra join concepts.

        Tries a strict pass first, then one that accepts partial sources.
        """
        for accept_partial in [False, True]:
            expanded = gen_merge_node(
                all_concepts=root_targets,
                environment=self.ctx.environment,
                g=self.ctx.g,
                depth=self.ctx.next_depth,
                source_concepts=self.ctx.source_concepts,
                history=self.ctx.history,
                search_conditions=self.ctx.conditions,
                accept_partial=accept_partial,
            )

            if expanded:
                # Trim the expanded node's outputs back down to the targets.
                self._handle_expanded_node(expanded, root_targets)
                return expanded

        logger.info(
            f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
            f"could not find additional concept(s) to inject"
        )
        return None

    def _handle_expanded_node(
        self, expanded: StrategyNode, root_targets: List[BuildConcept]
    ) -> None:
        """Restrict an expanded node to the targets, keeping pseudonyms hidden."""
        extra = restrict_node_outputs_targets(expanded, root_targets, self.ctx.depth)

        # Extras that are pseudonyms of a target stay available (but hidden)
        # so downstream consumers can still resolve through them.
        pseudonyms = [
            x for x in extra if any(x.address in y.pseudonyms for y in root_targets)
        ]

        if pseudonyms:
            expanded.add_output_concepts(pseudonyms)
            logger.info(
                f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
                f"Hiding pseudonyms {[c.address for c in pseudonyms]}"
            )
            expanded.hide_output_concepts(pseudonyms)

        logger.info(
            f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
            f"Found connections for {[c.address for c in root_targets]} "
            f"via concept addition; removing extra {[c.address for c in extra]}"
        )

    def _try_synonym_resolution(
        self, root_targets: List[BuildConcept]
    ) -> Optional[StrategyNode]:
        """Attempt resolution via synonym substitution, guarding against recursion.

        Only searches when these targets have not already been started in
        history (otherwise we could recurse indefinitely on the same set).
        """
        logger.info(
            f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
            f"Could not resolve root concepts, checking for synonyms"
        )

        if not self.ctx.history.check_started(
            root_targets,
            accept_partial=self.ctx.accept_partial,
            conditions=self.ctx.conditions,
        ):
            # Mark these targets as in-flight before recursing.
            self.ctx.history.log_start(
                root_targets,
                accept_partial=self.ctx.accept_partial,
                conditions=self.ctx.conditions,
            )

            resolved = gen_synonym_node(
                all_concepts=root_targets,
                environment=self.ctx.environment,
                g=self.ctx.g,
                depth=self.ctx.next_depth,
                source_concepts=self.ctx.source_concepts,
                history=self.ctx.history,
                conditions=self.ctx.conditions,
                accept_partial=self.ctx.accept_partial,
            )

            if resolved:
                logger.info(
                    f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
                    f"resolved concepts through synonyms"
                )
                return resolved
        else:
            logger.info(
                f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
                f"skipping synonym search, already in a recursion for these concepts"
            )

        return None
414
+
415
+
416
def generate_node(
    concept: BuildConcept,
    local_optional: List[BuildConcept],
    environment: BuildEnvironment,
    g: ReferenceGraph,
    depth: int,
    source_concepts: SearchConceptsType,
    history: History,
    accept_partial: bool = False,
    conditions: BuildWhereClause | None = None,
) -> StrategyNode | None:
    """Generate a strategy node for ``concept``, dispatching on its derivation.

    Checks for a directly selectable (materialized) source first; otherwise
    routes to the generator matching the concept's derivation type.

    Raises:
        ValueError: if the concept's derivation has no registered handler.
    """
    context = NodeGenerationContext(
        concept=concept,
        local_optional=local_optional,
        environment=environment,
        g=g,
        depth=depth,
        source_concepts=source_concepts,
        history=history,
        accept_partial=accept_partial,
        conditions=conditions,
    )

    # A pre-materialized source short-circuits all derivation handling.
    candidate = history.gen_select_node(
        concept,
        local_optional,
        environment,
        g,
        depth + 1,
        fail_if_not_found=False,
        accept_partial=accept_partial,
        accept_partial_optional=False,
        conditions=conditions,
    )
    if candidate:
        return candidate

    # Simple derivations map one-to-one onto generator functions.
    simple_handlers = {
        Derivation.WINDOW: _generate_window_node,
        Derivation.FILTER: _generate_filter_node,
        Derivation.UNNEST: _generate_unnest_node,
        Derivation.RECURSIVE: _generate_recursive_node,
        Derivation.UNION: _generate_union_node,
        Derivation.AGGREGATE: _generate_aggregate_node,
        Derivation.ROWSET: _generate_rowset_node,
        Derivation.MULTISELECT: _generate_multiselect_node,
        Derivation.GROUP_TO: _generate_group_to_node,
        Derivation.BASIC: _generate_basic_node,
    }
    if concept.derivation in simple_handlers:
        return simple_handlers[concept.derivation](context)
    # ROOT requires the stateful multi-strategy handler.
    if concept.derivation == Derivation.ROOT:
        return RootNodeHandler(context).generate()

    raise ValueError(f"Unknown derivation {concept.derivation} on {concept}")
@@ -0,0 +1,123 @@
1
+ from typing import List
2
+
3
+ from trilogy.constants import logger
4
+ from trilogy.core.enums import Derivation, Granularity
5
+ from trilogy.core.models.build import (
6
+ BuildConcept,
7
+ BuildRowsetItem,
8
+ )
9
+
10
+
11
def depth_to_prefix(depth: int) -> str:
    """Return one tab per recursion level, used to indent discovery log lines."""
    return "".join(["\t"] * depth)
13
+
14
+
15
# Tag prepended to every log line emitted by the discovery machinery,
# making its output easy to grep out of mixed logs.
LOGGER_PREFIX = "[DISCOVERY LOOP]"
16
+
17
+
18
def get_upstream_concepts(base: BuildConcept, nested: bool = False) -> set[str]:
    """Collect the transitive upstream concept addresses feeding ``base``.

    ``nested`` adds the concept's own address to the result; it is set on
    recursive calls so intermediate ancestors are included, while the
    top-level concept itself is excluded.
    """
    upstream: set[str] = set()
    if nested:
        upstream.add(base.address)
    if not base.lineage:
        return upstream
    for arg in base.lineage.concept_arguments:
        # A concept derived from any member of a rowset depends on ALL of
        # that rowset's outputs.
        if arg.derivation == Derivation.ROWSET:
            assert isinstance(arg.lineage, BuildRowsetItem), type(arg.lineage)
            rowset = arg.lineage.rowset
            for item in rowset.select.output_components:
                upstream.add(f"{rowset.name}.{item.address}")
        upstream = upstream | get_upstream_concepts(arg, nested=True)
    return upstream
33
+
34
+
35
def get_priority_concept(
    all_concepts: List[BuildConcept],
    attempted_addresses: set[str],
    found_concepts: set[str],
    depth: int,
) -> BuildConcept:
    """Select the next concept the discovery loop should attempt to source.

    Candidates are the concepts not yet attempted and not already found,
    sorted by address for determinism. They are prioritized by derivation:
    single-row constants (no joins needed) first, then derivations that
    cannot tolerate partial sources (multiselects, rowsets, unions,
    aggregates, windows, filters), then unnests/recursives/basics/group-tos,
    and finally plain root selects and remaining constants. A candidate
    that is upstream of another candidate is deferred so the derived copy
    resolves first, which usually resolves more cleanly.

    Args:
        all_concepts: the full set of concepts still to be sourced.
        attempted_addresses: addresses already tried this loop.
        found_concepts: addresses already resolved.
        depth: current recursion depth (log indentation only).

    Returns:
        The highest-priority unattempted concept.

    Raises:
        ValueError: if every candidate has already been attempted.
    """
    # Optimized search for missing concepts; sorted for deterministic output.
    candidates = sorted(
        (
            c
            for c in all_concepts
            if c.address not in attempted_addresses and c.address not in found_concepts
        ),
        key=lambda x: x.address,
    )

    priority = (
        # anything that needs no joins first, so we can exit early
        [
            c
            for c in candidates
            if c.derivation == Derivation.CONSTANT
            and c.granularity == Granularity.SINGLE_ROW
        ]
        # then multiselects, to remove them from scope
        + [c for c in candidates if c.derivation == Derivation.MULTISELECT]
        # then rowsets, to remove them from scope, as they cannot get partials
        + [c for c in candidates if c.derivation == Derivation.ROWSET]
        # then unions, to remove them from scope, as they cannot get partials
        + [c for c in candidates if c.derivation == Derivation.UNION]
        # then aggregates, as they cannot get partials
        + [c for c in candidates if c.derivation == Derivation.AGGREGATE]
        # then windows, as they cannot get partials
        + [c for c in candidates if c.derivation == Derivation.WINDOW]
        # then filters, which also cannot get partials
        + [c for c in candidates if c.derivation == Derivation.FILTER]
        # unnests are weird?
        + [c for c in candidates if c.derivation == Derivation.UNNEST]
        + [c for c in candidates if c.derivation == Derivation.RECURSIVE]
        + [c for c in candidates if c.derivation == Derivation.BASIC]
        + [c for c in candidates if c.derivation == Derivation.GROUP_TO]
        # finally our plain selects
        + [c for c in candidates if c.derivation == Derivation.ROOT]
        # and any non-single-row constants
        + [c for c in candidates if c.derivation == Derivation.CONSTANT]
    )

    # Append any derivation not covered above (address-based check).
    seen = {c.address for c in priority}
    priority += [c for c in candidates if c.address not in seen]

    # Hoist the recursive upstream computation out of the pairwise check:
    # build the union of every candidate's upstream addresses once, instead
    # of recomputing get_upstream_concepts for each (x, c) pair.
    upstream_of_any: set[str] = set()
    for c in priority:
        upstream_of_any |= get_upstream_concepts(c)

    # If anything is the parent of another pending concept, defer it so the
    # derived copy is fetched first, as that usually resolves cleaner;
    # deferred parents still run, just after everything else.
    final: List[BuildConcept] = []
    deferred: List[BuildConcept] = []
    for candidate in priority:
        if candidate.address in upstream_of_any:
            logger.info(
                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} delaying fetch of {candidate.address} as parent of another concept"
            )
            deferred.append(candidate)
        else:
            final.append(candidate)
    final += deferred

    if final:
        return final[0]
    raise ValueError(
        f"Cannot resolve query. No remaining priority concepts, have attempted {attempted_addresses}"
    )