pytrilogy 0.0.3.102__tar.gz → 0.0.3.104__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (161)
  1. {pytrilogy-0.0.3.102/pytrilogy.egg-info → pytrilogy-0.0.3.104}/PKG-INFO +2 -1
  2. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104/pytrilogy.egg-info}/PKG-INFO +2 -1
  3. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/SOURCES.txt +2 -0
  4. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/requires.txt +1 -0
  5. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_partial_handling.py +2 -2
  6. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/__init__.py +1 -1
  7. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/constants.py +1 -1
  8. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/execute.py +1 -6
  9. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/optimization.py +13 -4
  10. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/__init__.py +2 -0
  11. pytrilogy-0.0.3.104/trilogy/core/optimizations/hide_unused_concept.py +51 -0
  12. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/predicate_pushdown.py +9 -1
  13. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/concept_strategies_v3.py +35 -14
  14. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/discovery_node_factory.py +6 -6
  15. pytrilogy-0.0.3.104/trilogy/core/processing/discovery_utility.py +294 -0
  16. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/basic_node.py +1 -0
  17. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/common.py +1 -0
  18. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/filter_node.py +0 -10
  19. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/group_node.py +36 -0
  20. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/multiselect_node.py +1 -1
  21. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/node_merge_node.py +2 -6
  22. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/rowset_node.py +1 -21
  23. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/union_node.py +1 -1
  24. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/unnest_node.py +24 -8
  25. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/base_node.py +13 -3
  26. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/group_node.py +9 -91
  27. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/merge_node.py +9 -0
  28. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/utility.py +8 -0
  29. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/base.py +20 -7
  30. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/common.py +5 -0
  31. pytrilogy-0.0.3.104/trilogy/std/color.preql +3 -0
  32. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/display.preql +3 -3
  33. pytrilogy-0.0.3.102/trilogy/core/processing/discovery_utility.py +0 -145
  34. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/LICENSE.md +0 -0
  35. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/README.md +0 -0
  36. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pyproject.toml +0 -0
  37. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/dependency_links.txt +0 -0
  38. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/entry_points.txt +0 -0
  39. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/top_level.txt +0 -0
  40. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/setup.cfg +0 -0
  41. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/setup.py +0 -0
  42. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_datatypes.py +0 -0
  43. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_declarations.py +0 -0
  44. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_derived_concepts.py +0 -0
  45. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_discovery_nodes.py +0 -0
  46. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_enums.py +0 -0
  47. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_environment.py +0 -0
  48. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_execute_models.py +0 -0
  49. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_executor.py +0 -0
  50. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_failure.py +0 -0
  51. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_functions.py +0 -0
  52. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_imports.py +0 -0
  53. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_metadata.py +0 -0
  54. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_models.py +0 -0
  55. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_multi_join_assignments.py +0 -0
  56. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_parse_engine.py +0 -0
  57. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_parsing.py +0 -0
  58. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_parsing_failures.py +0 -0
  59. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_query_processing.py +0 -0
  60. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_query_render.py +0 -0
  61. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_select.py +0 -0
  62. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_show.py +0 -0
  63. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_statements.py +0 -0
  64. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_typing.py +0 -0
  65. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_undefined_concept.py +0 -0
  66. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_user_functions.py +0 -0
  67. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_validators.py +0 -0
  68. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/tests/test_where_clause.py +0 -0
  69. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/authoring/__init__.py +0 -0
  70. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/__init__.py +0 -0
  71. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/constants.py +0 -0
  72. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/enums.py +0 -0
  73. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/env_processor.py +0 -0
  74. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/environment_helpers.py +0 -0
  75. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/ergonomics.py +0 -0
  76. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/exceptions.py +0 -0
  77. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/functions.py +0 -0
  78. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/graph_models.py +0 -0
  79. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/internal.py +0 -0
  80. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/__init__.py +0 -0
  81. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/author.py +0 -0
  82. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/build.py +0 -0
  83. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/build_environment.py +0 -0
  84. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/core.py +0 -0
  85. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/datasource.py +0 -0
  86. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/models/environment.py +0 -0
  87. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/base_optimization.py +0 -0
  88. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/inline_datasource.py +0 -0
  89. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/__init__.py +0 -0
  90. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/discovery_validation.py +0 -0
  91. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/graph_utils.py +0 -0
  92. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/__init__.py +0 -0
  93. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/constant_node.py +0 -0
  94. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  95. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  96. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  97. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  98. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
  99. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_node.py +0 -0
  100. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
  101. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/window_node.py +0 -0
  102. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/__init__.py +0 -0
  103. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/filter_node.py +0 -0
  104. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  105. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  106. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/union_node.py +0 -0
  107. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  108. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/window_node.py +0 -0
  109. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/query_processor.py +0 -0
  110. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/statements/__init__.py +0 -0
  111. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/statements/author.py +0 -0
  112. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/statements/build.py +0 -0
  113. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/statements/common.py +0 -0
  114. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/statements/execute.py +0 -0
  115. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/utility.py +0 -0
  116. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/__init__.py +0 -0
  117. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/common.py +0 -0
  118. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/concept.py +0 -0
  119. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/datasource.py +0 -0
  120. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/environment.py +0 -0
  121. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/core/validation/fix.py +0 -0
  122. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/__init__.py +0 -0
  123. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/bigquery.py +0 -0
  124. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/config.py +0 -0
  125. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/dataframe.py +0 -0
  126. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/duckdb.py +0 -0
  127. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/enums.py +0 -0
  128. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/metadata.py +0 -0
  129. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/postgres.py +0 -0
  130. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/presto.py +0 -0
  131. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/snowflake.py +0 -0
  132. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/dialect/sql_server.py +0 -0
  133. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/engine.py +0 -0
  134. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/executor.py +0 -0
  135. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/hooks/__init__.py +0 -0
  136. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/hooks/base_hook.py +0 -0
  137. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/hooks/graph_hook.py +0 -0
  138. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/hooks/query_debugger.py +0 -0
  139. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/metadata/__init__.py +0 -0
  140. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parser.py +0 -0
  141. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/__init__.py +0 -0
  142. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/common.py +0 -0
  143. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/config.py +0 -0
  144. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/exceptions.py +0 -0
  145. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/helpers.py +0 -0
  146. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/parse_engine.py +0 -0
  147. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/render.py +0 -0
  148. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/parsing/trilogy.lark +0 -0
  149. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/py.typed +0 -0
  150. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/render.py +0 -0
  151. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/scripts/__init__.py +0 -0
  152. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/scripts/trilogy.py +0 -0
  153. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/__init__.py +0 -0
  154. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/date.preql +0 -0
  155. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/geography.preql +0 -0
  156. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/metric.preql +0 -0
  157. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/money.preql +0 -0
  158. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/net.preql +0 -0
  159. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/ranking.preql +0 -0
  160. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/std/report.preql +0 -0
  161. {pytrilogy-0.0.3.102 → pytrilogy-0.0.3.104}/trilogy/utility.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pytrilogy
- Version: 0.0.3.102
+ Version: 0.0.3.104
  Summary: Declarative, typed query language that compiles to SQL.
  Home-page:
  Author:
@@ -19,6 +19,7 @@ Requires-Dist: sqlalchemy<2.0.0
  Requires-Dist: networkx
  Requires-Dist: pyodbc
  Requires-Dist: pydantic
+ Requires-Dist: duckdb<1.4.0
  Requires-Dist: duckdb-engine
  Requires-Dist: click
  Provides-Extra: postgres
pytrilogy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pytrilogy
- Version: 0.0.3.102
+ Version: 0.0.3.104
  Summary: Declarative, typed query language that compiles to SQL.
  Home-page:
  Author:
@@ -19,6 +19,7 @@ Requires-Dist: sqlalchemy<2.0.0
  Requires-Dist: networkx
  Requires-Dist: pyodbc
  Requires-Dist: pydantic
+ Requires-Dist: duckdb<1.4.0
  Requires-Dist: duckdb-engine
  Requires-Dist: click
  Provides-Extra: postgres
pytrilogy.egg-info/SOURCES.txt
@@ -68,6 +68,7 @@ trilogy/core/models/environment.py
  trilogy/core/models/execute.py
  trilogy/core/optimizations/__init__.py
  trilogy/core/optimizations/base_optimization.py
+ trilogy/core/optimizations/hide_unused_concept.py
  trilogy/core/optimizations/inline_datasource.py
  trilogy/core/optimizations/predicate_pushdown.py
  trilogy/core/processing/__init__.py
@@ -146,6 +147,7 @@ trilogy/parsing/trilogy.lark
  trilogy/scripts/__init__.py
  trilogy/scripts/trilogy.py
  trilogy/std/__init__.py
+ trilogy/std/color.preql
  trilogy/std/date.preql
  trilogy/std/display.preql
  trilogy/std/geography.preql
pytrilogy.egg-info/requires.txt
@@ -4,6 +4,7 @@ sqlalchemy<2.0.0
  networkx
  pyodbc
  pydantic
+ duckdb<1.4.0
  duckdb-engine
  click
  
tests/test_partial_handling.py
@@ -2,7 +2,7 @@ from sqlalchemy import create_engine
  
  from trilogy import Dialects, Executor
  from trilogy.core.enums import Purpose
- from trilogy.core.models.author import Concept
+ from trilogy.core.models.author import Concept, Grain
  from trilogy.core.models.core import (
      DataType,
  )
@@ -90,6 +90,7 @@ def setup_titanic(env: Environment):
                  ColumnAssignment(alias="name", concept=name),
                  ColumnAssignment(alias="fare", concept=fare),
              ],
+             grain=Grain(components=[id.address]),
          ),
      )
      return env
@@ -140,7 +141,6 @@ def test_partial_assignment():
          depth=0,
      )
      assert isinstance(sourced, FilterNode)
-     assert len(sourced.parents) == 1
  
  
  def test_filter_query():
trilogy/__init__.py
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
  from trilogy.executor import Executor
  from trilogy.parser import parse
  
- __version__ = "0.0.3.102"
+ __version__ = "0.0.3.104"
  
  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/constants.py
@@ -37,7 +37,7 @@ class Comments:
  
      show: bool = False
      basic: bool = True
-     joins: bool = True
+     joins: bool = False
      nullable: bool = True
      partial: bool = True
      source_map: bool = False
trilogy/core/models/execute.py
@@ -711,8 +711,6 @@ class QueryDatasource(BaseModel):
                  f" {[c.address for c in self.output_concepts]} concepts and"
                  f" {other.name} with {[c.address for c in other.output_concepts]} concepts"
              )
-         logger.info(self.source_map)
-         logger.info(other.source_map)
  
          merged_datasources: dict[str, Union[BuildDatasource, "QueryDatasource"]] = {}
  
@@ -816,10 +814,7 @@ class QueryDatasource(BaseModel):
                      use_raw_name,
                      force_alias=force_alias,
                  )
-             except ValueError as e:
-                 from trilogy.constants import logger
-
-                 logger.debug(e)
+             except ValueError:
                  continue
          existing = [c.with_grain(self.grain) for c in self.output_concepts]
          if concept in existing:
trilogy/core/optimization.py
@@ -5,6 +5,7 @@ from trilogy.core.models.build import (
  )
  from trilogy.core.models.execute import CTE, RecursiveCTE, UnionCTE
  from trilogy.core.optimizations import (
+     HideUnusedConcepts,
      InlineDatasource,
      OptimizationRule,
      PredicatePushdown,
@@ -84,11 +85,18 @@ def filter_irrelevant_ctes(
          # child.existence_source_map[x2].append(parent.name)
          # else:
          relevant_ctes.add(cte.name)
-         for cte in cte.parent_ctes:
-             recurse(cte, inverse_map)
+
+         for parent in cte.parent_ctes:
+             if parent.name in relevant_ctes:
+                 logger.info(
+                     f"[Optimization][Irrelevent CTE filtering] Already visited {parent.name} when visting {cte.name}, potential recursive dag"
+                 )
+                 continue
+
+             recurse(parent, inverse_map)
          if isinstance(cte, UnionCTE):
-             for cte in cte.internal_ctes:
-                 recurse(cte, inverse_map)
+             for internal in cte.internal_ctes:
+                 recurse(internal, inverse_map)
  
      inverse_map = gen_inverse_map(input)
      recurse(root_cte, inverse_map)
@@ -220,6 +228,7 @@ def optimize_ctes(
          REGISTERED_RULES.append(PredicatePushdown())
      if CONFIG.optimizations.predicate_pushdown:
          REGISTERED_RULES.append(PredicatePushdownRemove())
+     REGISTERED_RULES.append(HideUnusedConcepts())
      for rule in REGISTERED_RULES:
          loops = 0
          complete = False
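The filter_irrelevant_ctes change above swaps an unguarded recursive walk for a visited-set check, so a cyclic CTE DAG can no longer recurse forever. A minimal sketch of that pattern, using a hypothetical stand-in Node type rather than trilogy's actual CTE classes:

    class Node:
        def __init__(self, name: str, parents: list["Node"] | None = None):
            self.name = name
            self.parents = parents or []

    def collect_relevant(node: Node, relevant: set[str]) -> None:
        # Mark the node, then walk its parents, skipping any already seen.
        relevant.add(node.name)
        for parent in node.parents:
            if parent.name in relevant:
                # Already visited: without this guard, a cycle recurses forever.
                continue
            collect_relevant(parent, relevant)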
trilogy/core/optimizations/__init__.py
@@ -1,4 +1,5 @@
  from .base_optimization import OptimizationRule
+ from .hide_unused_concept import HideUnusedConcepts
  from .inline_datasource import InlineDatasource
  from .predicate_pushdown import PredicatePushdown, PredicatePushdownRemove
  
@@ -7,4 +8,5 @@ __all__ = [
      "InlineDatasource",
      "PredicatePushdown",
      "PredicatePushdownRemove",
+     "HideUnusedConcepts",
  ]
trilogy/core/optimizations/hide_unused_concept.py (new file)
@@ -0,0 +1,51 @@
+ from trilogy.core.models.build import (
+     BuildConcept,
+ )
+ from trilogy.core.models.execute import CTE, UnionCTE
+ from trilogy.core.optimizations.base_optimization import OptimizationRule
+
+
+ class HideUnusedConcepts(OptimizationRule):
+     def __init__(self, *args, **kwargs) -> None:
+         super().__init__(*args, **kwargs)
+
+     def optimize(
+         self, cte: CTE | UnionCTE, inverse_map: dict[str, list[CTE | UnionCTE]]
+     ) -> bool:
+         used = set()
+         from trilogy.dialect.base import BaseDialect
+
+         renderer = BaseDialect()
+         children = inverse_map.get(cte.name, [])
+         if not children:
+             return False
+         for v in children:
+             self.log(f"Analyzing usage of {cte.name} in {v.name}")
+             renderer.render_cte(v)
+         used = renderer.used_map.get(cte.name, set())
+         self.log(f"Used concepts for {cte.name}: {used} from {renderer.used_map}")
+         add_to_hidden: list[BuildConcept] = []
+         for concept in cte.output_columns:
+             if concept.address not in used:
+                 add_to_hidden.append(concept)
+         newly_hidden = [
+             x.address for x in add_to_hidden if x.address not in cte.hidden_concepts
+         ]
+         non_hidden = [
+             x for x in cte.output_columns if x.address not in cte.hidden_concepts
+         ]
+         if not newly_hidden or len(non_hidden) <= 1:
+             return False
+         self.log(
+             f"Hiding unused concepts {[x.address for x in add_to_hidden]} from {cte.name} (used: {used}, all: {[x.address for x in cte.output_columns]})"
+         )
+         candidates = [
+             x.address
+             for x in cte.output_columns
+             if x.address not in used and x.address not in cte.hidden_concepts
+         ]
+         if len(candidates) == len(set([x.address for x in cte.output_columns])):
+             # pop one out
+             candidates.pop()
+         cte.hidden_concepts = set(candidates)
+         return True
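For context, rules like this plug into the optimize_ctes loop shown earlier: each rule's optimize(cte, inverse_map) is called per CTE and returns True only when it mutated the CTE. A minimal sketch of a custom rule with the same shape (illustrative only; it assumes nothing beyond the interface visible in this diff):

    from trilogy.core.optimizations.base_optimization import OptimizationRule

    class LogOnlyRule(OptimizationRule):
        # Hypothetical no-op rule mirroring the HideUnusedConcepts structure.
        def optimize(self, cte, inverse_map) -> bool:
            self.log(f"visited {cte.name}; {len(inverse_map.get(cte.name, []))} consumers")
            return False  # no mutation, so the optimizer can mark this rule complete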
trilogy/core/optimizations/predicate_pushdown.py
@@ -1,5 +1,6 @@
  from trilogy.core.enums import (
      BooleanOperator,
+     SourceType,
  )
  from trilogy.core.models.build import (
      BuildComparison,
@@ -59,12 +60,19 @@ class PredicatePushdown(OptimizationRule):
              )
              return False
          materialized = {k for k, v in parent_cte.source_map.items() if v != []}
+
          if not row_conditions or not materialized:
              return False
          output_addresses = {x.address for x in parent_cte.output_columns}
          # if any of the existence conditions are created on the asset, we can't push up to it
          if existence_conditions and existence_conditions.intersection(output_addresses):
              return False
+         if existence_conditions:
+             self.log(
+                 f"Not pushing up existence {candidate} to {parent_cte.name} as it is a filter node"
+             )
+             if parent_cte.source.source_type == SourceType.FILTER:
+                 return False
          # if it's a root datasource, we can filter on _any_ of the output concepts
          if parent_cte.is_root_datasource:
              extra_check = {
@@ -81,7 +89,7 @@ class PredicatePushdown(OptimizationRule):
          children = inverse_map.get(parent_cte.name, [])
          if all([is_child_of(candidate, child.condition) for child in children]):
              self.log(
-                 f"All concepts are found on {parent_cte.name} with existing {parent_cte.condition} and all it's {len(children)} children include same filter; pushing up {candidate}"
+                 f"All concepts [{row_conditions}] and existence conditions [{existence_conditions}] not block pushup of [{output_addresses}]found on {parent_cte.name} with existing {parent_cte.condition} and all it's {len(children)} children include same filter; pushing up {candidate}"
              )
              if parent_cte.condition and not is_scalar_condition(
                  parent_cte.condition
trilogy/core/processing/concept_strategies_v3.py
@@ -19,7 +19,7 @@ from trilogy.core.processing.discovery_utility import (
      LOGGER_PREFIX,
      depth_to_prefix,
      get_priority_concept,
-     group_if_required,
+     group_if_required_v2,
  )
  from trilogy.core.processing.discovery_validation import (
      ValidationResult,
@@ -66,7 +66,19 @@ def generate_candidates_restrictive(
  
      # if it's single row, joins are irrelevant. Fetch without keys.
      if priority_concept.granularity == Granularity.SINGLE_ROW:
-         return [], conditions
+         logger.info("Have single row concept, including only other single row optional")
+         optional = (
+             [
+                 x
+                 for x in candidates
+                 if x.granularity == Granularity.SINGLE_ROW
+                 and x.address not in priority_concept.pseudonyms
+                 and priority_concept.address not in x.pseudonyms
+             ]
+             if priority_concept.derivation == Derivation.AGGREGATE
+             else []
+         )
+         return optional, conditions
  
      if conditions and priority_concept.derivation in ROOT_DERIVATIONS:
          logger.info(
@@ -374,15 +386,21 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
          logger.info(
              f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values by setting outputs to {[x.address for x in output.output_concepts if x.address in non_virtual_output]}"
          )
-         output.set_output_concepts(
-             [
-                 x
-                 for x in output.output_concepts
-                 if x.address in non_virtual_output
-                 or any(c in non_virtual_output for c in x.pseudonyms)
-             ],
-             rebuild=False,
-         )
+         # output.set_output_concepts(
+         #     [
+         #         x
+         #         for x in output.output_concepts
+         #         if x.address not in non_virtual_difference_values
+         #         or any(c in non_virtual_output for c in x.pseudonyms)
+         #     ],
+         #     rebuild=True,
+         #     change_visibility=False
+         # )
+         # output.set_output_concepts(context.original_mandatory)
+
+         # if isinstance(output, MergeNode):
+         #     output.force_group = True
+         # output.rebuild_cache()
  
          logger.info(
              f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Source stack has single node, returning that {type(output)}"
@@ -416,14 +434,17 @@
      logger.info(
          f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node output {[x.address for x in output.usable_outputs]} partial {[c.address for c in output.partial_concepts or []]} with {context.conditions}"
      )
+     from trilogy.core.processing.discovery_utility import group_if_required_v2
+
      if condition_required and context.conditions and non_virtual_different:
          logger.info(
              f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
          )
-         return group_if_required(
+         return group_if_required_v2(
              output, context.original_mandatory, context.environment
          )
-     return output
+
+     return group_if_required_v2(output, context.original_mandatory, context.environment)
  
  
  def _search_concepts(
@@ -588,4 +609,4 @@ def source_query_concepts(
      logger.info(
          f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
      )
-     return group_if_required(root, output_concepts, environment)
+     return group_if_required_v2(root, output_concepts, environment)
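The switch to group_if_required_v2 routes every completed discovery loop through the grain check implemented in the new discovery_utility.py further down: a GroupNode is only added when the grain the parent sources produce is not already a subset of the grain the requested outputs need. Reduced to plain sets (hypothetical addresses; the real code compares BuildGrain objects, which are richer than sets):

    # Illustrative reduction of check_if_group_required's core test.
    upstream_grain = {"order.id"}                      # grain the parent sources produce
    target_grain = {"order.id", "order.customer_id"}   # grain the requested outputs need
    group_required = not upstream_grain.issubset(target_grain)
    assert group_required is False  # already at target grain: no GroupNode inserted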
trilogy/core/processing/discovery_node_factory.py
@@ -177,7 +177,12 @@ def _generate_union_node(ctx: NodeGenerationContext) -> StrategyNode | None:
  def _generate_aggregate_node(ctx: NodeGenerationContext) -> StrategyNode | None:
      # Filter out constants to avoid multiplication issues
      agg_optional = [
-         x for x in ctx.local_optional if x.granularity != Granularity.SINGLE_ROW
+         x
+         for x in ctx.local_optional
+         if not (
+             x.granularity == Granularity.SINGLE_ROW
+             and x.derivation != Derivation.AGGREGATE
+         )
      ]
  
      logger.info(
@@ -376,11 +381,6 @@ class RootNodeHandler:
  
          if pseudonyms:
              expanded.add_output_concepts(pseudonyms)
-             logger.info(
-                 f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
-                 f"Hiding pseudonyms {[c.address for c in pseudonyms]}"
-             )
-             expanded.hide_output_concepts(pseudonyms)
  
          logger.info(
              f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
trilogy/core/processing/discovery_utility.py (new file)
@@ -0,0 +1,294 @@
+ from typing import List
+
+ from trilogy.constants import logger
+ from trilogy.core.enums import Derivation, Purpose
+ from trilogy.core.models.build import (
+     BuildConcept,
+     BuildDatasource,
+     BuildFilterItem,
+     BuildGrain,
+     BuildRowsetItem,
+ )
+ from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.models.execute import QueryDatasource, UnnestJoin
+ from trilogy.core.processing.nodes import GroupNode, MergeNode, StrategyNode
+ from trilogy.core.processing.utility import GroupRequiredResponse
+
+
+ def depth_to_prefix(depth: int) -> str:
+     return "\t" * depth
+
+
+ LOGGER_PREFIX = "[DISCOVERY LOOP]"
+
+
+ def calculate_effective_parent_grain(
+     node: QueryDatasource | BuildDatasource,
+ ) -> BuildGrain:
+     # calculate the effective grain of the parent node
+     # this is the union of all parent grains
+     if isinstance(node, MergeNode):
+         grain = BuildGrain()
+         qds = node.resolve()
+         if not qds.joins:
+             return qds.datasources[0].grain
+         for join in qds.joins:
+             if isinstance(join, UnnestJoin):
+                 continue
+             pairs = join.concept_pairs or []
+             for key in pairs:
+                 left = key.existing_datasource
+                 grain += left.grain
+             keys = [key.right for key in pairs]
+             join_grain = BuildGrain.from_concepts(keys)
+             if join_grain == join.right_datasource.grain:
+                 logger.info(f"irrelevant right join {join}, does not change grain")
+             else:
+                 logger.info(
+                     f"join changes grain, adding {join.right_datasource.grain} to {grain}"
+                 )
+                 grain += join.right_datasource.grain
+         return grain
+     else:
+         return node.grain or BuildGrain()
+
+
+ def check_if_group_required(
+     downstream_concepts: List[BuildConcept],
+     parents: list[QueryDatasource | BuildDatasource],
+     environment: BuildEnvironment,
+     depth: int = 0,
+ ) -> GroupRequiredResponse:
+     padding = "\t" * depth
+     target_grain = BuildGrain.from_concepts(
+         downstream_concepts,
+         environment=environment,
+     )
+
+     comp_grain = BuildGrain()
+     for source in parents:
+         # comp_grain += source.grain
+         comp_grain += calculate_effective_parent_grain(source)
+
+     # dynamically select if we need to group
+     # we must avoid grouping if we are already at grain
+     if comp_grain.issubset(target_grain):
+
+         logger.info(
+             f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, target: {target_grain}, grain is subset of target, no group node required"
+         )
+         return GroupRequiredResponse(target_grain, comp_grain, False)
+     # find out what extra is in the comp grain vs target grain
+     difference = [
+         environment.concepts[c] for c in (comp_grain - target_grain).components
+     ]
+     logger.info(
+         f"{padding}{LOGGER_PREFIX} Group requirement check: upstream grain: {comp_grain}, desired grain: {target_grain} from , difference {[x.address for x in difference]}"
+     )
+     for x in difference:
+         logger.info(
+             f"{padding}{LOGGER_PREFIX} Difference concept {x.address} purpose {x.purpose} keys {x.keys}"
+         )
+
+     # if the difference is all unique properties whose keys are in the source grain
+     # we can also suppress the group
+     if all(
+         [
+             x.keys
+             and all(
+                 environment.concepts[z].address in comp_grain.components for z in x.keys
+             )
+             for x in difference
+         ]
+     ):
+         logger.info(
+             f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
+         )
+         return GroupRequiredResponse(target_grain, comp_grain, False)
+     if all([x.purpose == Purpose.KEY for x in difference]):
+         logger.info(
+             f"{padding}{LOGGER_PREFIX} checking if downstream is unique properties of key"
+         )
+         replaced_grain_raw: list[set[str]] = [
+             (
+                 x.keys or set()
+                 if x.purpose == Purpose.UNIQUE_PROPERTY
+                 else set([x.address])
+             )
+             for x in downstream_concepts
+             if x.address in target_grain.components
+         ]
+         # flatten the list of lists
+         replaced_grain = [item for sublist in replaced_grain_raw for item in sublist]
+         # if the replaced grain is a subset of the comp grain, we can skip the group
+         unique_grain_comp = BuildGrain.from_concepts(
+             replaced_grain, environment=environment
+         )
+         if comp_grain.issubset(unique_grain_comp):
+             logger.info(
+                 f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
+             )
+             return GroupRequiredResponse(target_grain, comp_grain, False)
+     logger.info(
+         f"{padding}{LOGGER_PREFIX} Checking for grain equivalence for filters and rowsets"
+     )
+     ngrain = []
+     for con in target_grain.components:
+         full = environment.concepts[con]
+         if full.derivation == Derivation.ROWSET:
+             ngrain.append(full.address.split(".", 1)[1])
+         elif full.derivation == Derivation.FILTER:
+             assert isinstance(full.lineage, BuildFilterItem)
+             if isinstance(full.lineage.content, BuildConcept):
+                 ngrain.append(full.lineage.content.address)
+         else:
+             ngrain.append(full.address)
+     target_grain2 = BuildGrain.from_concepts(
+         ngrain,
+         environment=environment,
+     )
+     if comp_grain.issubset(target_grain2):
+         logger.info(
+             f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain2}, pre rowset grain is subset of target, no group node required"
+         )
+         return GroupRequiredResponse(target_grain2, comp_grain, False)
+
+     logger.info(f"{padding}{LOGGER_PREFIX} Group requirement check: group required")
+     return GroupRequiredResponse(target_grain, comp_grain, True)
+
+
+ def group_if_required_v2(
+     root: StrategyNode, final: List[BuildConcept], environment: BuildEnvironment
+ ):
+     required = check_if_group_required(
+         downstream_concepts=final, parents=[root.resolve()], environment=environment
+     )
+     targets = [
+         x
+         for x in root.output_concepts
+         if x.address in final or any(c in final for c in x.pseudonyms)
+     ]
+     if required.required:
+         if isinstance(root, MergeNode):
+             root.force_group = True
+             root.set_output_concepts(targets, rebuild=False, change_visibility=False)
+             root.rebuild_cache()
+             return root
+         elif isinstance(root, GroupNode):
+             # root.set_output_concepts(final, rebuild=False)
+             # root.rebuild_cache()
+             return root
+         return GroupNode(
+             output_concepts=targets,
+             input_concepts=targets,
+             environment=environment,
+             parents=[root],
+             partial_concepts=root.partial_concepts,
+             preexisting_conditions=root.preexisting_conditions,
+         )
+     elif isinstance(root, GroupNode):
+
+         return root
+     else:
+         root.set_output_concepts(targets, rebuild=False, change_visibility=False)
+         return root
+
+
+ def get_upstream_concepts(base: BuildConcept, nested: bool = False) -> set[str]:
+     upstream = set()
+     if nested:
+         upstream.add(base.address)
+     if not base.lineage:
+         return upstream
+     for x in base.lineage.concept_arguments:
+         # if it's derived from any value in a rowset, ALL rowset items are upstream
+         if x.derivation == Derivation.ROWSET:
+             assert isinstance(x.lineage, BuildRowsetItem), type(x.lineage)
+             for y in x.lineage.rowset.select.output_components:
+                 upstream.add(f"{x.lineage.rowset.name}.{y.address}")
+                 # upstream = upstream.union(get_upstream_concepts(y, nested=True))
+         upstream = upstream.union(get_upstream_concepts(x, nested=True))
+     return upstream
+
+
+ def get_priority_concept(
+     all_concepts: List[BuildConcept],
+     attempted_addresses: set[str],
+     found_concepts: set[str],
+     depth: int,
+ ) -> BuildConcept:
+     # optimized search for missing concepts
+     pass_one = sorted(
+         [
+             c
+             for c in all_concepts
+             if c.address not in attempted_addresses and c.address not in found_concepts
+         ],
+         key=lambda x: x.address,
+     )
+     # sometimes we need to scan intermediate concepts to get merge keys or filter keys,
+     # so do an exhaustive search
+     # pass_two = [c for c in all_concepts+filter_only if c.address not in attempted_addresses]
+     for remaining_concept in (pass_one,):
+         priority = (
+             # then multiselects to remove them from scope
+             [c for c in remaining_concept if c.derivation == Derivation.MULTISELECT]
+             +
+             # then rowsets to remove them from scope, as they cannot get partials
+             [c for c in remaining_concept if c.derivation == Derivation.ROWSET]
+             +
+             # then rowsets to remove them from scope, as they cannot get partials
+             [c for c in remaining_concept if c.derivation == Derivation.UNION]
+             # we should be home-free here
+             +
+             # then aggregates to remove them from scope, as they cannot get partials
+             [c for c in remaining_concept if c.derivation == Derivation.AGGREGATE]
+             # then windows to remove them from scope, as they cannot get partials
+             + [c for c in remaining_concept if c.derivation == Derivation.WINDOW]
+             # then filters to remove them from scope, also cannot get partials
+             + [c for c in remaining_concept if c.derivation == Derivation.FILTER]
+             # unnests are weird?
+             + [c for c in remaining_concept if c.derivation == Derivation.UNNEST]
+             + [c for c in remaining_concept if c.derivation == Derivation.RECURSIVE]
+             + [c for c in remaining_concept if c.derivation == Derivation.BASIC]
+             + [c for c in remaining_concept if c.derivation == Derivation.GROUP_TO]
+             + [c for c in remaining_concept if c.derivation == Derivation.CONSTANT]
+             # finally our plain selects
+             + [
+                 c for c in remaining_concept if c.derivation == Derivation.ROOT
+             ]  # and any non-single row constants
+         )
+
+         priority += [
+             c
+             for c in remaining_concept
+             if c.address not in [x.address for x in priority]
+         ]
+         final = []
+         # if any thing is derived from another concept
+         # get the derived copy first
+         # as this will usually resolve cleaner
+         for x in priority:
+             if any(
+                 [
+                     x.address
+                     in get_upstream_concepts(
+                         c,
+                     )
+                     for c in priority
+                 ]
+             ):
+                 logger.info(
+                     f"{depth_to_prefix(depth)}{LOGGER_PREFIX} delaying fetch of {x.address} as parent of another concept"
+                 )
+                 continue
+             final.append(x)
+         # then append anything we didn't get
+         for x2 in priority:
+             if x2 not in final:
+                 final.append(x2)
+     if final:
+         return final[0]
+     raise ValueError(
+         f"Cannot resolve query. No remaining priority concepts, have attempted {attempted_addresses}"
+     )
trilogy/core/processing/node_generators/basic_node.py
@@ -143,4 +143,5 @@ def gen_basic_node(
      logger.info(
          f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: input: {[x.address for x in parent_node.input_concepts]} output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
      )
+
      return parent_node