pytrilogy 0.0.3.55__tar.gz → 0.0.3.56__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic. Click here for more details.

Files changed (150) hide show
  1. {pytrilogy-0.0.3.55/pytrilogy.egg-info → pytrilogy-0.0.3.56}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pytrilogy.egg-info/SOURCES.txt +5 -0
  4. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/__init__.py +1 -1
  5. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/enums.py +1 -0
  6. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/author.py +6 -4
  7. pytrilogy-0.0.3.56/trilogy/core/processing/concept_strategies_v3.py +592 -0
  8. pytrilogy-0.0.3.56/trilogy/core/processing/discovery_node_factory.py +469 -0
  9. pytrilogy-0.0.3.56/trilogy/core/processing/discovery_utility.py +123 -0
  10. pytrilogy-0.0.3.56/trilogy/core/processing/discovery_validation.py +155 -0
  11. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/select_node.py +6 -8
  12. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/__init__.py +2 -4
  13. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/snowflake.py +1 -1
  14. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/common.py +1 -3
  15. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/parse_engine.py +6 -0
  16. pytrilogy-0.0.3.56/trilogy/std/__init__.py +0 -0
  17. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/std/date.preql +3 -1
  18. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/std/geography.preql +4 -0
  19. pytrilogy-0.0.3.56/trilogy/std/money.preql +67 -0
  20. pytrilogy-0.0.3.56/trilogy/std/net.preql +8 -0
  21. pytrilogy-0.0.3.55/trilogy/core/processing/concept_strategies_v3.py +0 -1164
  22. pytrilogy-0.0.3.55/trilogy/std/money.preql +0 -6
  23. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/LICENSE.md +0 -0
  24. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/README.md +0 -0
  25. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pyproject.toml +0 -0
  26. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pytrilogy.egg-info/dependency_links.txt +0 -0
  27. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pytrilogy.egg-info/entry_points.txt +0 -0
  28. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pytrilogy.egg-info/requires.txt +0 -0
  29. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/pytrilogy.egg-info/top_level.txt +0 -0
  30. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/setup.cfg +0 -0
  31. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/setup.py +0 -0
  32. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_datatypes.py +0 -0
  33. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_declarations.py +0 -0
  34. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_derived_concepts.py +0 -0
  35. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_discovery_nodes.py +0 -0
  36. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_enums.py +0 -0
  37. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_environment.py +0 -0
  38. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_executor.py +0 -0
  39. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_failure.py +0 -0
  40. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_functions.py +0 -0
  41. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_imports.py +0 -0
  42. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_metadata.py +0 -0
  43. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_models.py +0 -0
  44. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_multi_join_assignments.py +0 -0
  45. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_parse_engine.py +0 -0
  46. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_parsing.py +0 -0
  47. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_parsing_failures.py +0 -0
  48. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_partial_handling.py +0 -0
  49. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_query_processing.py +0 -0
  50. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_query_render.py +0 -0
  51. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_select.py +0 -0
  52. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_show.py +0 -0
  53. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_statements.py +0 -0
  54. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_typing.py +0 -0
  55. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_undefined_concept.py +0 -0
  56. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_user_functions.py +0 -0
  57. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/tests/test_where_clause.py +0 -0
  58. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/authoring/__init__.py +0 -0
  59. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/compiler.py +0 -0
  60. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/constants.py +0 -0
  61. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/__init__.py +0 -0
  62. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/constants.py +0 -0
  63. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/env_processor.py +0 -0
  64. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/environment_helpers.py +0 -0
  65. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/ergonomics.py +0 -0
  66. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/exceptions.py +0 -0
  67. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/functions.py +0 -0
  68. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/graph_models.py +0 -0
  69. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/internal.py +0 -0
  70. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/__init__.py +0 -0
  71. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/build.py +0 -0
  72. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/build_environment.py +0 -0
  73. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/core.py +0 -0
  74. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/datasource.py +0 -0
  75. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/environment.py +0 -0
  76. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/models/execute.py +0 -0
  77. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/optimization.py +0 -0
  78. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/optimizations/__init__.py +0 -0
  79. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/optimizations/base_optimization.py +0 -0
  80. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/optimizations/inline_datasource.py +0 -0
  81. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  82. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/__init__.py +0 -0
  83. /pytrilogy-0.0.3.55/trilogy/core/processing/node_generators/select_helpers/__init__.py → /pytrilogy-0.0.3.56/trilogy/core/processing/discovery_loop.py +0 -0
  84. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/graph_utils.py +0 -0
  85. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/__init__.py +0 -0
  86. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  87. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/common.py +0 -0
  88. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  89. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/group_node.py +0 -0
  90. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  91. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  92. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
  93. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  94. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  95. {pytrilogy-0.0.3.55/trilogy/core/statements → pytrilogy-0.0.3.56/trilogy/core/processing/node_generators/select_helpers}/__init__.py +0 -0
  96. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  97. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
  98. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
  99. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/union_node.py +0 -0
  100. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  101. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/node_generators/window_node.py +0 -0
  102. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/base_node.py +0 -0
  103. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/filter_node.py +0 -0
  104. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/group_node.py +0 -0
  105. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/merge_node.py +0 -0
  106. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  107. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  108. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/union_node.py +0 -0
  109. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  110. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/nodes/window_node.py +0 -0
  111. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/processing/utility.py +0 -0
  112. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/query_processor.py +0 -0
  113. {pytrilogy-0.0.3.55/trilogy/dialect → pytrilogy-0.0.3.56/trilogy/core/statements}/__init__.py +0 -0
  114. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/statements/author.py +0 -0
  115. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/statements/build.py +0 -0
  116. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/statements/common.py +0 -0
  117. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/statements/execute.py +0 -0
  118. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/core/utility.py +0 -0
  119. {pytrilogy-0.0.3.55/trilogy/metadata → pytrilogy-0.0.3.56/trilogy/dialect}/__init__.py +0 -0
  120. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/base.py +0 -0
  121. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/bigquery.py +0 -0
  122. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/common.py +0 -0
  123. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/config.py +0 -0
  124. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/dataframe.py +0 -0
  125. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/duckdb.py +0 -0
  126. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/enums.py +0 -0
  127. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/postgres.py +0 -0
  128. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/presto.py +0 -0
  129. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/dialect/sql_server.py +0 -0
  130. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/engine.py +0 -0
  131. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/executor.py +0 -0
  132. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/hooks/__init__.py +0 -0
  133. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/hooks/base_hook.py +0 -0
  134. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/hooks/graph_hook.py +0 -0
  135. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/hooks/query_debugger.py +0 -0
  136. {pytrilogy-0.0.3.55/trilogy/parsing → pytrilogy-0.0.3.56/trilogy/metadata}/__init__.py +0 -0
  137. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parser.py +0 -0
  138. {pytrilogy-0.0.3.55/trilogy/scripts → pytrilogy-0.0.3.56/trilogy/parsing}/__init__.py +0 -0
  139. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/config.py +0 -0
  140. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/exceptions.py +0 -0
  141. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/helpers.py +0 -0
  142. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/render.py +0 -0
  143. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/parsing/trilogy.lark +0 -0
  144. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/py.typed +0 -0
  145. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/render.py +0 -0
  146. {pytrilogy-0.0.3.55/trilogy/std → pytrilogy-0.0.3.56/trilogy/scripts}/__init__.py +0 -0
  147. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/scripts/trilogy.py +0 -0
  148. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/std/display.preql +0 -0
  149. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/std/report.preql +0 -0
  150. {pytrilogy-0.0.3.55 → pytrilogy-0.0.3.56}/trilogy/utility.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.55
3
+ Version: 0.0.3.56
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.55
3
+ Version: 0.0.3.56
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -71,6 +71,10 @@ trilogy/core/optimizations/inline_datasource.py
71
71
  trilogy/core/optimizations/predicate_pushdown.py
72
72
  trilogy/core/processing/__init__.py
73
73
  trilogy/core/processing/concept_strategies_v3.py
74
+ trilogy/core/processing/discovery_loop.py
75
+ trilogy/core/processing/discovery_node_factory.py
76
+ trilogy/core/processing/discovery_utility.py
77
+ trilogy/core/processing/discovery_validation.py
74
78
  trilogy/core/processing/graph_utils.py
75
79
  trilogy/core/processing/utility.py
76
80
  trilogy/core/processing/node_generators/__init__.py
@@ -138,4 +142,5 @@ trilogy/std/date.preql
138
142
  trilogy/std/display.preql
139
143
  trilogy/std/geography.preql
140
144
  trilogy/std/money.preql
145
+ trilogy/std/net.preql
141
146
  trilogy/std/report.preql
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
4
4
  from trilogy.executor import Executor
5
5
  from trilogy.parser import parse
6
6
 
7
- __version__ = "0.0.3.55"
7
+ __version__ = "0.0.3.56"
8
8
 
9
9
  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
@@ -42,6 +42,7 @@ class Purpose(Enum):
42
42
 
43
43
  class Derivation(Enum):
44
44
  BASIC = "basic"
45
+ GROUP_TO = "group_to"
45
46
  WINDOW = "window"
46
47
  AGGREGATE = "aggregate"
47
48
  FILTER = "filter"
@@ -1176,6 +1176,12 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
1176
1176
  and lineage.operator == FunctionType.UNION
1177
1177
  ):
1178
1178
  return Derivation.UNION
1179
+ elif (
1180
+ lineage
1181
+ and isinstance(lineage, (BuildFunction, Function))
1182
+ and lineage.operator == FunctionType.GROUP
1183
+ ):
1184
+ return Derivation.GROUP_TO
1179
1185
  elif (
1180
1186
  lineage
1181
1187
  and isinstance(lineage, (BuildFunction, Function))
@@ -1195,10 +1201,6 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
1195
1201
  return Derivation.CONSTANT
1196
1202
  return Derivation.ROOT
1197
1203
 
1198
- # @property
1199
- # def derivation(self) -> Derivation:
1200
- # return self.calculate_derivation(self.lineage, self.purpose)
1201
-
1202
1204
  @classmethod
1203
1205
  def calculate_granularity(cls, derivation: Derivation, grain: Grain, lineage):
1204
1206
  from trilogy.core.models.build import BuildFunction
@@ -0,0 +1,592 @@
1
+ from dataclasses import dataclass
2
+ from typing import List, Optional
3
+
4
+ from trilogy.constants import logger
5
+ from trilogy.core.enums import Derivation, Granularity
6
+ from trilogy.core.env_processor import generate_graph
7
+ from trilogy.core.exceptions import UnresolvableQueryException
8
+ from trilogy.core.graph_models import ReferenceGraph
9
+ from trilogy.core.models.author import (
10
+ UndefinedConcept,
11
+ )
12
+ from trilogy.core.models.build import (
13
+ BuildConcept,
14
+ BuildWhereClause,
15
+ )
16
+ from trilogy.core.models.build_environment import BuildEnvironment
17
+ from trilogy.core.processing.discovery_node_factory import generate_node
18
+ from trilogy.core.processing.discovery_utility import (
19
+ LOGGER_PREFIX,
20
+ depth_to_prefix,
21
+ get_priority_concept,
22
+ )
23
+ from trilogy.core.processing.discovery_validation import (
24
+ ValidationResult,
25
+ validate_stack,
26
+ )
27
+ from trilogy.core.processing.nodes import (
28
+ GroupNode,
29
+ History,
30
+ MergeNode,
31
+ StrategyNode,
32
+ )
33
+ from trilogy.utility import unique
34
+
35
+ SKIPPED_DERIVATIONS = [
36
+ Derivation.AGGREGATE,
37
+ Derivation.FILTER,
38
+ Derivation.WINDOW,
39
+ Derivation.UNNEST,
40
+ Derivation.RECURSIVE,
41
+ Derivation.ROWSET,
42
+ Derivation.BASIC,
43
+ Derivation.GROUP_TO,
44
+ Derivation.MULTISELECT,
45
+ Derivation.UNION,
46
+ ]
47
+
48
+ ROOT_DERIVATIONS = [Derivation.ROOT, Derivation.CONSTANT]
49
+
50
+
51
+ def generate_candidates_restrictive(
52
+ priority_concept: BuildConcept,
53
+ candidates: list[BuildConcept],
54
+ exhausted: set[str],
55
+ depth: int,
56
+ conditions: BuildWhereClause | None = None,
57
+ ) -> List[BuildConcept]:
58
+ # if it's single row, joins are irrelevant. Fetch without keys.
59
+ if priority_concept.granularity == Granularity.SINGLE_ROW:
60
+ return []
61
+
62
+ local_candidates = [
63
+ x
64
+ for x in list(candidates)
65
+ if x.address not in exhausted
66
+ and x.granularity != Granularity.SINGLE_ROW
67
+ and x.address not in priority_concept.pseudonyms
68
+ and priority_concept.address not in x.pseudonyms
69
+ ]
70
+ if conditions and priority_concept.derivation in ROOT_DERIVATIONS:
71
+ logger.info(
72
+ f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Injecting additional conditional row arguments as all remaining concepts are roots or constant"
73
+ )
74
+ return unique(list(conditions.row_arguments) + local_candidates, "address")
75
+ return local_candidates
76
+
77
+
78
+ def append_existence_check(
79
+ node: StrategyNode,
80
+ environment: BuildEnvironment,
81
+ graph: ReferenceGraph,
82
+ where: BuildWhereClause,
83
+ history: History,
84
+ ):
85
+ # if we have a where clause doing an existence check
86
+ # treat that as separate subquery
87
+ if where.existence_arguments:
88
+ for subselect in where.existence_arguments:
89
+ if not subselect:
90
+ continue
91
+ if all([x.address in node.input_concepts for x in subselect]):
92
+ logger.info(
93
+ f"{LOGGER_PREFIX} existance clause inputs already found {[str(c) for c in subselect]}"
94
+ )
95
+ continue
96
+ logger.info(
97
+ f"{LOGGER_PREFIX} fetching existence clause inputs {[str(c) for c in subselect]}"
98
+ )
99
+ parent = source_query_concepts(
100
+ [*subselect],
101
+ history=history,
102
+ environment=environment,
103
+ g=graph,
104
+ )
105
+ assert parent, "Could not resolve existence clause"
106
+ node.add_parents([parent])
107
+ logger.info(
108
+ f"{LOGGER_PREFIX} fetching existence clause inputs {[str(c) for c in subselect]}"
109
+ )
110
+ node.add_existence_concepts([*subselect])
111
+
112
+
113
+ def search_concepts(
114
+ mandatory_list: List[BuildConcept],
115
+ history: History,
116
+ environment: BuildEnvironment,
117
+ depth: int,
118
+ g: ReferenceGraph,
119
+ accept_partial: bool = False,
120
+ conditions: BuildWhereClause | None = None,
121
+ ) -> StrategyNode | None:
122
+ hist = history.get_history(
123
+ search=mandatory_list, accept_partial=accept_partial, conditions=conditions
124
+ )
125
+ if hist is not False:
126
+ logger.info(
127
+ f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Returning search node from history ({'exists' if hist is not None else 'does not exist'}) for {[c.address for c in mandatory_list]} with accept_partial {accept_partial}"
128
+ )
129
+ assert not isinstance(hist, bool)
130
+ return hist
131
+
132
+ result = _search_concepts(
133
+ mandatory_list,
134
+ environment,
135
+ depth=depth,
136
+ g=g,
137
+ accept_partial=accept_partial,
138
+ history=history,
139
+ conditions=conditions,
140
+ )
141
+ # a node may be mutated after being cached; always store a copy
142
+ history.search_to_history(
143
+ mandatory_list,
144
+ accept_partial,
145
+ result.copy() if result else None,
146
+ conditions=conditions,
147
+ )
148
+ return result
149
+
150
+
151
+ @dataclass
152
+ class LoopContext:
153
+ mandatory_list: List[BuildConcept]
154
+ environment: BuildEnvironment
155
+ depth: int
156
+ g: ReferenceGraph
157
+ history: History
158
+ attempted: set[str]
159
+ found: set[str]
160
+ skip: set[str]
161
+ all_mandatory: set[str]
162
+ original_mandatory: List[BuildConcept]
163
+ completion_mandatory: List[BuildConcept]
164
+ stack: List[StrategyNode]
165
+ complete: ValidationResult = ValidationResult.INCOMPLETE
166
+ accept_partial: bool = False
167
+ must_evaluate_condition_on_this_level_not_push_down: bool = False
168
+ conditions: BuildWhereClause | None = None
169
+
170
+ @property
171
+ def incomplete(self) -> bool:
172
+ return self.attempted != self.all_mandatory
173
+
174
+
175
+ def initialize_loop_context(
176
+ mandatory_list: List[BuildConcept],
177
+ environment: BuildEnvironment,
178
+ depth: int,
179
+ g: ReferenceGraph,
180
+ history: History,
181
+ accept_partial: bool = False,
182
+ conditions: BuildWhereClause | None = None,
183
+ ):
184
+ # these are the concepts we need in the output projection
185
+ mandatory_list = unique(mandatory_list, "address")
186
+ # cache our values before any filter injection
187
+ original_mandatory = [*mandatory_list]
188
+ for x in mandatory_list:
189
+ if isinstance(x, UndefinedConcept):
190
+ raise SyntaxError(f"Undefined concept {x.address}")
191
+ all_mandatory = set(c.address for c in mandatory_list)
192
+
193
+ must_evaluate_condition_on_this_level_not_push_down = False
194
+
195
+ # if we have a filter, we may need to get more values to support that.
196
+ if conditions:
197
+ completion_mandatory = unique(
198
+ mandatory_list + list(conditions.row_arguments), "address"
199
+ )
200
+ # if anything we need to get is in the filter set and it's a computed value
201
+ # we need to get _everything_ in this loop
202
+ required_filters = [
203
+ x
204
+ for x in mandatory_list
205
+ if x.derivation not in ROOT_DERIVATIONS
206
+ and not (
207
+ x.derivation == Derivation.AGGREGATE
208
+ and x.granularity == Granularity.SINGLE_ROW
209
+ )
210
+ and x.address in conditions.row_arguments
211
+ ]
212
+ if any(required_filters):
213
+ logger.info(
214
+ f"{depth_to_prefix(depth)}{LOGGER_PREFIX} derived condition row inputs {[x.address for x in required_filters]} present in mandatory list, forcing condition evaluation at this level. "
215
+ )
216
+ mandatory_list = completion_mandatory
217
+ must_evaluate_condition_on_this_level_not_push_down = True
218
+ else:
219
+ logger.info(
220
+ f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Do not need to evaluate conditions yet."
221
+ )
222
+ else:
223
+
224
+ completion_mandatory = mandatory_list
225
+ return LoopContext(
226
+ mandatory_list=mandatory_list,
227
+ environment=environment,
228
+ depth=depth,
229
+ g=g,
230
+ history=history,
231
+ attempted=set(),
232
+ found=set(),
233
+ skip=set(),
234
+ all_mandatory=all_mandatory,
235
+ original_mandatory=original_mandatory,
236
+ completion_mandatory=completion_mandatory,
237
+ stack=[],
238
+ complete=ValidationResult.INCOMPLETE,
239
+ accept_partial=accept_partial,
240
+ must_evaluate_condition_on_this_level_not_push_down=must_evaluate_condition_on_this_level_not_push_down,
241
+ conditions=conditions,
242
+ )
243
+
244
+
245
+ def evaluate_loop_conditions(
246
+ context: LoopContext, priority_concept: BuildConcept
247
+ ) -> BuildWhereClause | None:
248
+ # filter evaluation
249
+ # always pass the filter up when we aren't looking at all filter inputs
250
+ # or there are any non-filter complex types
251
+ if context.conditions:
252
+ should_evaluate_filter_on_this_level_not_push_down = all(
253
+ [
254
+ x.address in context.mandatory_list
255
+ for x in context.conditions.row_arguments
256
+ ]
257
+ ) and not any(
258
+ [
259
+ x.derivation not in ROOT_DERIVATIONS
260
+ for x in context.mandatory_list
261
+ if x.address not in context.conditions.row_arguments
262
+ ]
263
+ )
264
+ else:
265
+ should_evaluate_filter_on_this_level_not_push_down = True
266
+ local_conditions = (
267
+ context.conditions
268
+ if context.conditions
269
+ and not context.must_evaluate_condition_on_this_level_not_push_down
270
+ and not should_evaluate_filter_on_this_level_not_push_down
271
+ else None
272
+ )
273
+ # but if it's not basic, and it's not condition;
274
+ # we do need to push it down (and have another layer of filter evaluation)
275
+ # to ensure filtering happens before something like a SUM
276
+ if (
277
+ context.conditions
278
+ and priority_concept.derivation not in ROOT_DERIVATIONS
279
+ and priority_concept.address not in context.conditions.row_arguments
280
+ ):
281
+ logger.info(
282
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Force including conditions in {priority_concept.address} to push filtering above complex condition that is not condition member or parent"
283
+ )
284
+ local_conditions = context.conditions
285
+ return local_conditions
286
+
287
+
288
+ def check_for_early_exit(
289
+ complete, partial, context: LoopContext, priority_concept: BuildConcept
290
+ ) -> bool:
291
+ if complete == ValidationResult.INCOMPLETE_CONDITION:
292
+ cond_dict = {str(node): node.preexisting_conditions for node in context.stack}
293
+ for node in context.stack:
294
+ logger.info(
295
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Node {node} has conditions {node.preexisting_conditions} and {node.conditions}"
296
+ )
297
+ raise SyntaxError(f"Have {cond_dict} and need {str(context.conditions)}")
298
+ # early exit if we have a complete stack with one node
299
+ # we can only early exit if we have a complete stack
300
+ # and we are not looking for more non-partial sources
301
+ if complete == ValidationResult.COMPLETE and (
302
+ not context.accept_partial or (context.accept_partial and not partial)
303
+ ):
304
+ logger.info(
305
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} breaking loop, complete"
306
+ )
307
+ return True
308
+ elif complete == ValidationResult.COMPLETE and context.accept_partial and partial:
309
+ if len(context.attempted) == len(context.mandatory_list):
310
+ logger.info(
311
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as we have attempted all nodes"
312
+ )
313
+ return True
314
+ logger.info(
315
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found complete stack with partials {partial}, continuing search, attempted {context.attempted} all {len(context.mandatory_list)}"
316
+ )
317
+ else:
318
+ logger.info(
319
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Not complete, continuing search"
320
+ )
321
+ # if we have attempted on root node, we've tried them all.
322
+ # inject in another search with filter concepts
323
+ if priority_concept.derivation == Derivation.ROOT:
324
+ logger.info(
325
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as attempted root with no results"
326
+ )
327
+ return True
328
+ return False
329
+
330
+
331
+ def generate_loop_completion(context: LoopContext, virtual) -> StrategyNode:
332
+ condition_required = True
333
+ non_virtual = [c for c in context.completion_mandatory if c.address not in virtual]
334
+ non_virtual_output = [
335
+ c for c in context.original_mandatory if c.address not in virtual
336
+ ]
337
+ non_virtual_different = len(context.completion_mandatory) != len(
338
+ context.original_mandatory
339
+ )
340
+ non_virtual_difference_values = set(
341
+ [x.address for x in context.completion_mandatory]
342
+ ).difference(set([x.address for x in context.original_mandatory]))
343
+ if not context.conditions:
344
+ condition_required = False
345
+ non_virtual = [c for c in context.mandatory_list if c.address not in virtual]
346
+
347
+ elif all(
348
+ [
349
+ x.preexisting_conditions == context.conditions.conditional
350
+ for x in context.stack
351
+ ]
352
+ ):
353
+ condition_required = False
354
+ non_virtual = [c for c in context.mandatory_list if c.address not in virtual]
355
+
356
+ if context.conditions and not condition_required:
357
+ parent_map = {
358
+ str(x): x.preexisting_conditions == context.conditions.conditional
359
+ for x in context.stack
360
+ }
361
+ logger.info(
362
+ f"Condition {context.conditions} not required, parents included filtering! {parent_map }"
363
+ )
364
+ if len(context.stack) == 1:
365
+ output: StrategyNode = context.stack[0]
366
+ if non_virtual_different:
367
+ logger.info(
368
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values"
369
+ )
370
+ output.set_output_concepts(
371
+ [x for x in output.output_concepts if x.address in non_virtual_output],
372
+ rebuild=False,
373
+ )
374
+
375
+ logger.info(
376
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Source stack has single node, returning that {type(output)}"
377
+ )
378
+ else:
379
+ logger.info(
380
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} wrapping multiple parent nodes {[type(x) for x in context.stack]} in merge node"
381
+ )
382
+ output = MergeNode(
383
+ input_concepts=non_virtual,
384
+ output_concepts=non_virtual,
385
+ environment=context.environment,
386
+ parents=context.stack,
387
+ depth=context.depth,
388
+ )
389
+
390
+ # ensure we can resolve our final merge
391
+ output.resolve()
392
+ if condition_required and context.conditions:
393
+ output.add_condition(context.conditions.conditional)
394
+ if context.conditions.existence_arguments:
395
+ append_existence_check(
396
+ output,
397
+ context.environment,
398
+ context.g,
399
+ where=context.conditions,
400
+ history=context.history,
401
+ )
402
+ elif context.conditions:
403
+ output.preexisting_conditions = context.conditions.conditional
404
+ logger.info(
405
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node partial {[c.address for c in output.partial_concepts]} with {context.conditions}"
406
+ )
407
+ if condition_required and context.conditions and non_virtual_different:
408
+ logger.info(
409
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
410
+ )
411
+ result = GroupNode.check_if_required(
412
+ downstream_concepts=context.original_mandatory,
413
+ parents=[output.resolve()],
414
+ environment=context.environment,
415
+ depth=context.depth,
416
+ )
417
+ if result.required:
418
+ logger.info(
419
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Adding group node"
420
+ )
421
+ return GroupNode(
422
+ output_concepts=context.original_mandatory,
423
+ input_concepts=context.original_mandatory,
424
+ environment=context.environment,
425
+ parents=[output],
426
+ partial_concepts=output.partial_concepts,
427
+ preexisting_conditions=context.conditions.conditional,
428
+ depth=context.depth,
429
+ )
430
+ return output
431
+
432
+
433
def _search_concepts(
    mandatory_list: List[BuildConcept],
    environment: BuildEnvironment,
    depth: int,
    g: ReferenceGraph,
    history: History,
    accept_partial: bool = False,
    conditions: BuildWhereClause | None = None,
) -> StrategyNode | None:
    """Core discovery loop: find a StrategyNode tree that sources every concept
    in ``mandatory_list``.

    Repeatedly picks the highest-priority unresolved concept, generates a node
    for it, and validates whether the accumulated node stack now covers all
    mandatory concepts. On full coverage, delegates final assembly to
    ``generate_loop_completion``. If resolution fails and partial-concept
    sources were not yet allowed, retries the whole search once with
    ``accept_partial=True`` before giving up.

    :param mandatory_list: concepts that must all be sourced.
    :param environment: build environment providing datasources/concepts.
    :param depth: recursion depth, used for log indentation.
    :param g: reference graph of concept/datasource relationships.
    :param history: memoization/history object threaded through recursion.
    :param accept_partial: whether datasources that only partially cover a
        concept may be used.
    :param conditions: optional where-clause to push into sourcing.
    :returns: a resolved StrategyNode covering the mandatory list, or None if
        no resolution could be found.
    """

    # Mutable loop state (stack of generated nodes, attempted/skip sets,
    # found-concept bookkeeping) lives on the context object.
    context = initialize_loop_context(
        mandatory_list=mandatory_list,
        environment=environment,
        depth=depth,
        g=g,
        history=history,
        accept_partial=accept_partial,
        conditions=conditions,
    )

    # NOTE(review): `complete` and `virtual` (read after the loop) are only
    # bound inside the loop body — presumably initialize_loop_context
    # guarantees context.incomplete is truthy on entry; confirm.
    while context.incomplete:
        # Choose which unresolved concept to tackle next.
        priority_concept = get_priority_concept(
            context.mandatory_list,
            context.attempted,
            found_concepts=context.found,
            depth=depth,
        )

        local_conditions = evaluate_loop_conditions(context, priority_concept)
        logger.info(
            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} priority concept is {str(priority_concept)} derivation {priority_concept.derivation} granularity {priority_concept.granularity} with conditions {local_conditions}"
        )

        # All other mandatory concepts become optional "candidates" the node
        # generator may co-source alongside the priority concept.
        candidates = [
            c for c in context.mandatory_list if c.address != priority_concept.address
        ]
        candidate_list = generate_candidates_restrictive(
            priority_concept,
            candidates,
            context.skip,
            depth=depth,
            conditions=context.conditions,
        )

        logger.info(
            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Beginning sourcing loop for {priority_concept.address}, accept_partial {accept_partial}, optional {[v.address for v in candidate_list]}, exhausted {[c for c in context.skip]}"
        )
        node = generate_node(
            priority_concept,
            candidate_list,
            environment,
            g,
            depth,
            source_concepts=search_concepts,
            accept_partial=accept_partial,
            history=history,
            conditions=local_conditions,
        )
        if node:
            context.stack.append(node)
            node.resolve()
            # these concepts should not be attempted to be sourced again
            # as fetching them requires operating on a subset of concepts
            if priority_concept.derivation in SKIPPED_DERIVATIONS:
                context.skip.add(priority_concept.address)
        # Mark attempted regardless of success so the priority picker moves on.
        context.attempted.add(priority_concept.address)
        complete, found_c, missing_c, partial, virtual = validate_stack(
            environment,
            context.stack,
            context.mandatory_list,
            context.completion_mandatory,
            conditions=context.conditions,
            accept_partial=accept_partial,
        )
        # assign the freshly validated coverage back onto the loop context
        context.found = found_c
        early_exit = check_for_early_exit(complete, partial, context, priority_concept)
        if early_exit:
            break

    logger.info(
        f"{depth_to_prefix(depth)}{LOGGER_PREFIX} finished sourcing loop (complete: {complete}), have {context.found} from {[n for n in context.stack]} (missing {context.all_mandatory - context.found}), attempted {context.attempted}, virtual {virtual}"
    )
    if complete == ValidationResult.COMPLETE:
        return generate_loop_completion(context, virtual)

    # if we can't find it after expanding to a merge, then
    # accept partials in join paths
    if not accept_partial:
        logger.info(
            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Stack is not connected graph, flag for accepting partial addresses is {accept_partial}, changing flag"
        )
        partial_search = search_concepts(
            # use the original mandatory list
            mandatory_list=mandatory_list,
            environment=environment,
            depth=depth,
            g=g,
            accept_partial=True,
            history=history,
            conditions=conditions,
        )
        if partial_search:
            logger.info(
                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Found {[c.address for c in mandatory_list]} by accepting partials"
            )
            return partial_search
    logger.error(
        f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Could not resolve concepts {[c.address for c in mandatory_list]}, network outcome was {complete}, missing {context.all_mandatory - context.found},"
    )

    return None
545
+
546
+
547
def source_query_concepts(
    output_concepts: List[BuildConcept],
    history: History,
    environment: BuildEnvironment,
    g: Optional[ReferenceGraph] = None,
    conditions: Optional[BuildWhereClause] = None,
):
    """Entry point for resolving a query's output concepts into a node tree.

    Validates inputs, runs the recursive concept search from depth 0, and —
    if the resolved node's grain does not already match the requested
    outputs — wraps it in a final GroupNode.

    :param output_concepts: the concepts the query must produce.
    :param history: search history/memoization object.
    :param environment: build environment providing model metadata.
    :param g: optional pre-built reference graph; generated when omitted.
    :param conditions: optional where-clause applied during sourcing.
    :raises ValueError: when no output concepts are supplied.
    :raises UnresolvableQueryException: when the search cannot connect the
        requested outputs within the current model.
    :returns: the root StrategyNode for the query.
    """
    # Guard: an empty output list is a caller error, not a search failure.
    if not output_concepts:
        raise ValueError(f"No output concepts provided {output_concepts}")
    if not g:
        g = generate_graph(environment)

    root = search_concepts(
        mandatory_list=output_concepts,
        environment=environment,
        g=g,
        depth=0,
        history=history,
        conditions=conditions,
    )
    if not root:
        # Include purpose/derivation per concept to make failures debuggable.
        unresolved = [
            f"{c.address}<{c.purpose}>{c.derivation}>" for c in output_concepts
        ]
        raise UnresolvableQueryException(
            f"Could not resolve connections for query with output {unresolved} from current model."
        )

    # Hidden concepts are internal plumbing; only visible outputs matter for
    # the final grain check.
    visible = [x for x in root.output_concepts if x.address not in root.hidden_concepts]
    logger.info(
        f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in visible]}"
    )

    group_check = GroupNode.check_if_required(
        downstream_concepts=visible,
        parents=[root.resolve()],
        environment=environment,
    )
    if not group_check.required:
        return root

    # Grain mismatch: add a terminal group to collapse to the requested grain.
    grouped: StrategyNode = GroupNode(
        output_concepts=visible,
        input_concepts=visible,
        environment=environment,
        parents=[root],
        partial_concepts=root.partial_concepts,
    )
    return grouped