pytrilogy 0.0.3.61__tar.gz → 0.0.3.64__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (150)
  1. {pytrilogy-0.0.3.61/pytrilogy.egg-info → pytrilogy-0.0.3.64}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_select.py +1 -0
  4. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/__init__.py +1 -1
  5. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/build.py +6 -1
  6. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/concept_strategies_v3.py +31 -15
  7. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/discovery_node_factory.py +2 -3
  8. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/basic_node.py +7 -1
  9. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/node_merge_node.py +0 -1
  10. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/select_merge_node.py +52 -1
  11. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/synonym_node.py +34 -3
  12. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/__init__.py +11 -29
  13. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/statements/author.py +1 -1
  14. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/base.py +2 -0
  15. pytrilogy-0.0.3.64/trilogy/hooks/graph_hook.py +140 -0
  16. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/common.py +2 -2
  17. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/render.py +5 -1
  18. pytrilogy-0.0.3.61/trilogy/hooks/graph_hook.py +0 -86
  19. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/LICENSE.md +0 -0
  20. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/README.md +0 -0
  21. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pyproject.toml +0 -0
  22. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pytrilogy.egg-info/SOURCES.txt +0 -0
  23. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pytrilogy.egg-info/dependency_links.txt +0 -0
  24. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pytrilogy.egg-info/entry_points.txt +0 -0
  25. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pytrilogy.egg-info/requires.txt +0 -0
  26. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/pytrilogy.egg-info/top_level.txt +0 -0
  27. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/setup.cfg +0 -0
  28. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/setup.py +0 -0
  29. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_datatypes.py +0 -0
  30. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_declarations.py +0 -0
  31. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_derived_concepts.py +0 -0
  32. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_discovery_nodes.py +0 -0
  33. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_enums.py +0 -0
  34. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_environment.py +0 -0
  35. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_executor.py +0 -0
  36. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_failure.py +0 -0
  37. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_functions.py +0 -0
  38. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_imports.py +0 -0
  39. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_metadata.py +0 -0
  40. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_models.py +0 -0
  41. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_multi_join_assignments.py +0 -0
  42. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_parse_engine.py +0 -0
  43. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_parsing.py +0 -0
  44. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_parsing_failures.py +0 -0
  45. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_partial_handling.py +0 -0
  46. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_query_processing.py +0 -0
  47. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_query_render.py +0 -0
  48. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_show.py +0 -0
  49. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_statements.py +0 -0
  50. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_typing.py +0 -0
  51. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_undefined_concept.py +0 -0
  52. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_user_functions.py +0 -0
  53. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/tests/test_where_clause.py +0 -0
  54. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/authoring/__init__.py +0 -0
  55. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/compiler.py +0 -0
  56. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/constants.py +0 -0
  57. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/__init__.py +0 -0
  58. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/constants.py +0 -0
  59. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/enums.py +0 -0
  60. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/env_processor.py +0 -0
  61. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/environment_helpers.py +0 -0
  62. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/ergonomics.py +0 -0
  63. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/exceptions.py +0 -0
  64. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/functions.py +0 -0
  65. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/graph_models.py +0 -0
  66. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/internal.py +0 -0
  67. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/__init__.py +0 -0
  68. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/author.py +0 -0
  69. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/build_environment.py +0 -0
  70. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/core.py +0 -0
  71. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/datasource.py +0 -0
  72. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/environment.py +0 -0
  73. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/models/execute.py +0 -0
  74. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/optimization.py +0 -0
  75. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/optimizations/__init__.py +0 -0
  76. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/optimizations/base_optimization.py +0 -0
  77. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/optimizations/inline_datasource.py +0 -0
  78. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  79. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/__init__.py +0 -0
  80. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/discovery_loop.py +0 -0
  81. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/discovery_utility.py +0 -0
  82. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/discovery_validation.py +0 -0
  83. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/graph_utils.py +0 -0
  84. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/__init__.py +0 -0
  85. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/common.py +0 -0
  86. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  87. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/group_node.py +0 -0
  88. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  89. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  90. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  91. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  92. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  93. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  94. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/select_node.py +0 -0
  95. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/union_node.py +0 -0
  96. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  97. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/node_generators/window_node.py +0 -0
  98. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/base_node.py +0 -0
  99. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/filter_node.py +0 -0
  100. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/group_node.py +0 -0
  101. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/merge_node.py +0 -0
  102. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  103. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  104. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/union_node.py +0 -0
  105. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  106. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/nodes/window_node.py +0 -0
  107. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/processing/utility.py +0 -0
  108. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/query_processor.py +0 -0
  109. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/statements/__init__.py +0 -0
  110. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/statements/build.py +0 -0
  111. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/statements/common.py +0 -0
  112. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/statements/execute.py +0 -0
  113. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/core/utility.py +0 -0
  114. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/__init__.py +0 -0
  115. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/bigquery.py +0 -0
  116. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/common.py +0 -0
  117. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/config.py +0 -0
  118. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/dataframe.py +0 -0
  119. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/duckdb.py +0 -0
  120. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/enums.py +0 -0
  121. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/postgres.py +0 -0
  122. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/presto.py +0 -0
  123. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/snowflake.py +0 -0
  124. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/dialect/sql_server.py +0 -0
  125. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/engine.py +0 -0
  126. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/executor.py +0 -0
  127. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/hooks/__init__.py +0 -0
  128. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/hooks/base_hook.py +0 -0
  129. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/hooks/query_debugger.py +0 -0
  130. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/metadata/__init__.py +0 -0
  131. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parser.py +0 -0
  132. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/__init__.py +0 -0
  133. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/config.py +0 -0
  134. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/exceptions.py +0 -0
  135. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/helpers.py +0 -0
  136. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/parse_engine.py +0 -0
  137. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/parsing/trilogy.lark +0 -0
  138. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/py.typed +0 -0
  139. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/render.py +0 -0
  140. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/scripts/__init__.py +0 -0
  141. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/scripts/trilogy.py +0 -0
  142. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/__init__.py +0 -0
  143. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/date.preql +0 -0
  144. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/display.preql +0 -0
  145. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/geography.preql +0 -0
  146. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/money.preql +0 -0
  147. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/net.preql +0 -0
  148. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/ranking.preql +0 -0
  149. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/std/report.preql +0 -0
  150. {pytrilogy-0.0.3.61 → pytrilogy-0.0.3.64}/trilogy/utility.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.61
+Version: 0.0.3.64
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
pytrilogy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.61
+Version: 0.0.3.64
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
tests/test_select.py
@@ -122,6 +122,7 @@ def test_double_aggregate():


 def test_modifiers():
+
     q1 = """
     const a <- 1;
     const b <- 2;
trilogy/__init__.py
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
 from trilogy.executor import Executor
 from trilogy.parser import parse

-__version__ = "0.0.3.61"
+__version__ = "0.0.3.64"

 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/core/models/build.py
@@ -1586,7 +1586,10 @@ class Factory:

         return BuildFunction.model_construct(
             operator=base.operator,
-            arguments=[rval, *[self.build(c) for c in raw_args[1:]]],
+            arguments=[
+                rval,
+                *[self.handle_constant(self.build(c)) for c in raw_args[1:]],
+            ],
             output_datatype=base.output_datatype,
             output_purpose=base.output_purpose,
             valid_inputs=base.valid_inputs,
@@ -2042,4 +2045,6 @@ class Factory:
             and base.lineage.operator == FunctionType.CONSTANT
         ):
             return BuildParamaterizedConceptReference(concept=base)
+        elif isinstance(base, ConceptRef):
+            return self.handle_constant(self.build(base))
         return base
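
Taken together, the two build.py hunks above route constant-valued arguments through handle_constant when the Factory builds function arguments. A minimal illustrative sketch of that normalization pattern, not the package's code (the names below stand in for Trilogy's real Build* types):

# Illustrative sketch only: constant-derived arguments get wrapped so they can be
# bound as parameters later; everything else passes through untouched.
def handle_constant_sketch(built):
    if getattr(built, "is_constant", False):
        return ("parameterized_ref", built)  # stands in for BuildParamaterizedConceptReference
    return built

def build_arguments(rval, raw_args, build):
    # mirrors the new arguments=[rval, *[handle_constant(build(c)) ...]] shape
    return [rval, *[handle_constant_sketch(build(c)) for c in raw_args[1:]]]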
trilogy/core/processing/concept_strategies_v3.py
@@ -54,11 +54,7 @@ def generate_candidates_restrictive(
     exhausted: set[str],
     depth: int,
     conditions: BuildWhereClause | None = None,
-) -> List[BuildConcept]:
-    # if it's single row, joins are irrelevant. Fetch without keys.
-    if priority_concept.granularity == Granularity.SINGLE_ROW:
-        return []
-
+) -> tuple[list[BuildConcept], BuildWhereClause | None]:
     local_candidates = [
         x
         for x in list(candidates)
@@ -71,8 +67,16 @@ def generate_candidates_restrictive(
         logger.info(
             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Injecting additional conditional row arguments as all remaining concepts are roots or constant"
         )
-        return unique(list(conditions.row_arguments) + local_candidates, "address")
-    return local_candidates
+        # otherwise, we can ignore the conditions now that we've injected inputs
+        return (
+            unique(list(conditions.row_arguments) + local_candidates, "address"),
+            None,
+        )
+    # if it's single row, joins are irrelevant. Fetch without keys.
+    if priority_concept.granularity == Granularity.SINGLE_ROW:
+        return [], conditions
+
+    return local_candidates, conditions


 def append_existence_check(
@@ -104,9 +108,7 @@ def append_existence_check(
         )
         assert parent, "Could not resolve existence clause"
         node.add_parents([parent])
-        logger.info(
-            f"{LOGGER_PREFIX} fetching existence clause inputs {[str(c) for c in subselect]}"
-        )
+        logger.info(f"{LOGGER_PREFIX} found {[str(c) for c in subselect]}")
        node.add_existence_concepts([*subselect])


@@ -440,7 +442,19 @@ def _search_concepts(
     accept_partial: bool = False,
     conditions: BuildWhereClause | None = None,
 ) -> StrategyNode | None:
+    # check for direct materialization first
+    candidate = history.gen_select_node(
+        mandatory_list,
+        environment,
+        g,
+        depth + 1,
+        fail_if_not_found=False,
+        accept_partial=accept_partial,
+        conditions=conditions,
+    )

+    if candidate:
+        return candidate
     context = initialize_loop_context(
         mandatory_list=mandatory_list,
         environment=environment,
@@ -460,19 +474,21 @@ def _search_concepts(
         )

         local_conditions = evaluate_loop_conditions(context, priority_concept)
-        logger.info(
-            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} priority concept is {str(priority_concept)} derivation {priority_concept.derivation} granularity {priority_concept.granularity} with conditions {local_conditions}"
-        )

         candidates = [
             c for c in context.mandatory_list if c.address != priority_concept.address
         ]
-        candidate_list = generate_candidates_restrictive(
+        # the local conditions list may be override if we end up injecting conditions
+        candidate_list, local_conditions = generate_candidates_restrictive(
             priority_concept,
             candidates,
             context.skip,
             depth=depth,
-            conditions=context.conditions,
+            conditions=local_conditions,
+        )
+
+        logger.info(
+            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} priority concept is {str(priority_concept)} derivation {priority_concept.derivation} granularity {priority_concept.granularity} with conditions {local_conditions}"
         )

         logger.info(
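
The _search_concepts change above adds an early exit: before the per-concept discovery loop runs, the full mandatory list is tried against already-materialized sources. A minimal sketch of that control flow (the helper names here are placeholders, not the module's API):

# Illustrative sketch only: try a direct select over everything first,
# and only fall back to per-concept discovery if nothing matches.
def search(mandatory_list, try_direct_select, run_discovery_loop):
    candidate = try_direct_select(mandatory_list)
    if candidate is not None:
        return candidate
    return run_discovery_loop(mandatory_list)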
trilogy/core/processing/discovery_node_factory.py
@@ -438,15 +438,14 @@ def generate_node(
     )

     # Try materialized concept first
+    # this is worth checking every loop iteration
     candidate = history.gen_select_node(
-        concept,
-        local_optional,
+        [concept] + local_optional,
         environment,
         g,
         depth + 1,
         fail_if_not_found=False,
         accept_partial=accept_partial,
-        accept_partial_optional=False,
         conditions=conditions,
     )

trilogy/core/processing/node_generators/basic_node.py
@@ -52,6 +52,9 @@ def gen_basic_node(
     synonyms: list[BuildConcept] = []
     ignored_optional: set[str] = set()
     assert isinstance(concept.lineage, BuildFunction)
+    # when we are getting an attribute, if there is anything else
+    # that is an attribute of the same struct in local optional
+    # select that value for discovery as well
     if concept.lineage.operator == FunctionType.ATTR_ACCESS:
         logger.info(
             f"{depth_prefix}{LOGGER_PREFIX} checking for synonyms for attribute access"
@@ -62,7 +65,10 @@ def gen_basic_node(
             # gate to ensure we don't match to multiple synonyms
             if found:
                 continue
-            s_concept = environment.alias_origin_lookup[z]
+            if z in environment.concepts:
+                s_concept = environment.concepts[z]
+            else:
+                s_concept = environment.alias_origin_lookup[z]
             if is_equivalent_basic_function_lineage(concept, s_concept):
                 found = True
                 synonyms.append(s_concept)
trilogy/core/processing/node_generators/node_merge_node.py
@@ -239,7 +239,6 @@ def resolve_weak_components(
             if "__preql_internal" not in c.address
         ]
     )
-    logger.debug(f"Resolving weak components for {node_list} in {search_graph.nodes}")
     synonyms: set[str] = set()
     for x in all_concepts:
         synonyms = synonyms.union(x.pseudonyms)
trilogy/core/processing/node_generators/select_merge_node.py
@@ -68,6 +68,8 @@ def get_graph_exact_match(
         if node in datasources:
             ds = datasources[node]
             if not isinstance(ds, list):
+                if not ds.non_partial_for:
+                    continue
                 if ds.non_partial_for and conditions == ds.non_partial_for:
                     exact.add(node)
                     continue
@@ -95,6 +97,31 @@ def get_graph_grains(g: nx.DiGraph) -> dict[str, list[str]]:
     return grain_length


+def subgraph_is_complete(
+    nodes: list[str], targets: set[str], mapping: dict[str, str]
+) -> bool:
+    mapped = set([mapping.get(n, n) for n in nodes])
+    return all([t in mapped for t in targets])
+
+
+def prune_sources_for_conditions(
+    g: nx.DiGraph,
+    depth: int,
+    conditions: BuildWhereClause | None,
+):
+
+    complete = get_graph_exact_match(g, conditions)
+    to_remove = []
+    for node in g.nodes:
+        if node.startswith("ds~") and node not in complete:
+            to_remove.append(node)
+            logger.debug(
+                f"{padding(depth)}{LOGGER_PREFIX} removing datasource {node} as it is not a match for conditions {conditions}"
+            )
+    for node in to_remove:
+        g.remove_node(node)
+
+
 def create_pruned_concept_graph(
     g: nx.DiGraph,
     all_concepts: List[BuildConcept],
@@ -104,7 +131,10 @@ def create_pruned_concept_graph(
     depth: int = 0,
 ) -> nx.DiGraph:
     orig_g = g
+
     g = g.copy()
+    if conditions:
+        prune_sources_for_conditions(g, depth, conditions)
     union_options = get_union_sources(datasources, all_concepts)
     for ds_list in union_options:
         node_address = "ds~" + "-".join([x.name for x in ds_list])
@@ -183,6 +213,13 @@ def create_pruned_concept_graph(
     )

     subgraphs = list(nx.connected_components(g.to_undirected()))
+
+    subgraphs = [
+        s
+        for s in subgraphs
+        if subgraph_is_complete(s, target_addresses, relevant_concepts_pre)
+    ]
+
     if not subgraphs:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} cannot resolve root graph - no subgraphs after node prune"
@@ -486,6 +523,20 @@ def gen_select_merge_node(
     non_constant = [c for c in all_concepts if c.derivation != Derivation.CONSTANT]
     constants = [c for c in all_concepts if c.derivation == Derivation.CONSTANT]
     if not non_constant and constants:
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} only constant inputs to discovery, returning constant node directly"
+        )
+        if conditions:
+            if not all(
+                [x.derivation == Derivation.CONSTANT for x in conditions.row_arguments]
+            ):
+                logger.info(
+                    f"{padding(depth)}{LOGGER_PREFIX} conditions being passed in to constant node {conditions}, but not all concepts are constants."
+                )
+                return None
+            else:
+                constants += conditions.row_arguments
+
         return ConstantNode(
             output_concepts=constants,
             input_concepts=[],
@@ -494,7 +545,7 @@ def gen_select_merge_node(
             depth=depth,
             partial_concepts=[],
             force_group=False,
-            preexisting_conditions=conditions.conditional if conditions else None,
+            conditions=conditions.conditional if conditions else None,
         )
     for attempt in [False, True]:
         pruned_concept_graph = create_pruned_concept_graph(
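
The new subgraph_is_complete helper above is used to drop connected components that cannot cover every target concept. A small self-contained example of the same check, using made-up node names rather than real graph nodes:

# Illustrative only: plain-string stand-ins for graph node names.
def subgraph_is_complete(nodes, targets, mapping):
    mapped = set(mapping.get(n, n) for n in nodes)
    return all(t in mapped for t in targets)

nodes = ["ds~orders", "c~orders.id", "c~orders.total"]
mapping = {"c~orders.id": "orders.id", "c~orders.total": "orders.total"}
assert subgraph_is_complete(nodes, {"orders.id", "orders.total"}, mapping)
assert not subgraph_is_complete(nodes, {"orders.customer"}, mapping)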
trilogy/core/processing/node_generators/synonym_node.py
@@ -43,18 +43,49 @@ def gen_synonym_node(
             elif y in environment.concepts:
                 synonyms[x.address].append(environment.concepts[y])
                 synonym_count += 1
+    for address in synonyms:
+        synonyms[address].sort(key=lambda obj: obj.address)
     if synonym_count == 0:
         return None

     logger.info(f"{local_prefix} Generating Synonym Node with {len(synonyms)} synonyms")
+    sorted_keys = sorted(synonyms.keys())
+    combinations_list: list[tuple[BuildConcept, ...]] = list(
+        itertools.product(*(synonyms[obj] for obj in sorted_keys))
+    )
+
+    def similarity_sort_key(combo):
+        addresses = [x.address for x in combo]
+
+        # Calculate similarity score - count how many pairs share prefixes
+        similarity_score = 0
+        for i in range(len(addresses)):
+            for j in range(i + 1, len(addresses)):
+                # Find common prefix length
+                addr1_parts = addresses[i].split(".")
+                addr2_parts = addresses[j].split(".")
+                common_prefix_len = 0
+                for k in range(min(len(addr1_parts), len(addr2_parts))):
+                    if addr1_parts[k] == addr2_parts[k]:
+                        common_prefix_len += 1
+                    else:
+                        break
+                similarity_score += common_prefix_len

-    combinations = itertools.product(*(synonyms[obj] for obj in synonyms.keys()))
-    for combo in combinations:
+        # Sort by similarity (descending), then by addresses (ascending) for ties
+        return (-similarity_score, addresses)
+
+    combinations_list.sort(key=similarity_sort_key)
+    logger.info(combinations_list)
+    for combo in combinations_list:
         fingerprint = tuple([x.address for x in combo])
         if fingerprint == base_fingerprint:
             continue
+        logger.info(
+            f"{local_prefix} checking combination {fingerprint} with {len(combo)} concepts"
+        )
         attempt: StrategyNode | None = source_concepts(
-            combo,
+            list(combo),
             history=history,
             environment=environment,
             depth=depth,
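
The similarity_sort_key introduced above prefers synonym combinations whose addresses share namespace prefixes, so same-source combinations are attempted before cross-source ones. A standalone illustration of that ordering on plain address strings (the addresses are invented):

# Illustrative only: the same prefix-similarity ordering, applied to lists of address strings.
def similarity_sort_key(addresses):
    score = 0
    for i in range(len(addresses)):
        for j in range(i + 1, len(addresses)):
            a = addresses[i].split(".")
            b = addresses[j].split(".")
            k = 0
            while k < min(len(a), len(b)) and a[k] == b[k]:
                k += 1
            score += k
    return (-score, addresses)

combos = [["orders.id", "customers.name"], ["orders.id", "orders.total"]]
combos.sort(key=similarity_sort_key)
print(combos[0])  # ['orders.id', 'orders.total'] -- the shared 'orders' prefix sorts first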
trilogy/core/processing/nodes/__init__.py
@@ -124,51 +124,31 @@ class History(BaseModel):
            in self.started
        )

-    def _select_concepts_to_lookup(
-        self,
-        main: BuildConcept,
-        search: list[BuildConcept],
-        accept_partial: bool,
-        fail_if_not_found: bool,
-        accept_partial_optional: bool,
-        conditions: BuildWhereClause | None = None,
-    ) -> str:
-        return (
-            str(main.address)
-            + "|"
-            + "-".join([c.address for c in search])
-            + str(accept_partial)
-            + str(fail_if_not_found)
-            + str(accept_partial_optional)
-            + str(conditions)
-        )
-
     def gen_select_node(
         self,
-        concept: BuildConcept,
-        local_optional: list[BuildConcept],
+        concepts: list[BuildConcept],
         environment: BuildEnvironment,
         g,
         depth: int,
         fail_if_not_found: bool = False,
         accept_partial: bool = False,
-        accept_partial_optional: bool = False,
         conditions: BuildWhereClause | None = None,
     ) -> StrategyNode | None:
         from trilogy.core.processing.node_generators.select_node import gen_select_node

-        fingerprint = self._select_concepts_to_lookup(
-            concept,
-            local_optional,
+        fingerprint = self._concepts_to_lookup(
+            concepts,
             accept_partial,
-            fail_if_not_found,
-            accept_partial_optional=accept_partial_optional,
             conditions=conditions,
         )
         if fingerprint in self.select_history:
-            return self.select_history[fingerprint]
+            rval = self.select_history[fingerprint]
+            if rval:
+                # all nodes must be copied before returning
+                return rval.copy()
+            return rval
         gen = gen_select_node(
-            [concept] + local_optional,
+            concepts,
             environment,
             g,
             depth + 1,
@@ -177,6 +157,8 @@ class History(BaseModel):
            conditions=conditions,
        )
        self.select_history[fingerprint] = gen
+        if gen:
+            return gen.copy()
        return gen


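The History change above also starts returning copies of cached select nodes, so callers can no longer mutate the shared cache entry. A generic sketch of that cache-then-copy pattern (this is not the actual History class; names are illustrative):

# Illustrative only: a cached plan node is copied on every hit, so downstream
# mutation of the returned object never leaks back into the cache.
import copy

class PlanCache:
    def __init__(self):
        self._cache = {}

    def get_or_build(self, key, builder):
        if key in self._cache:
            hit = self._cache[key]
            return copy.deepcopy(hit) if hit is not None else None
        built = builder()
        self._cache[key] = built  # misses (None) are cached too, to avoid re-running the builder
        return copy.deepcopy(built) if built is not None else None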
trilogy/core/statements/author.py
@@ -448,7 +448,7 @@ class TypeDeclaration(BaseModel):
     type: CustomType


-class FunctionDeclaration(BaseModel):
+class FunctionDeclaration(HasUUID, BaseModel):
     name: str
     args: list[ArgBinding]
     expr: Expr
trilogy/dialect/base.py
@@ -58,6 +58,7 @@ from trilogy.core.query_processor import process_copy, process_persist, process_
 from trilogy.core.statements.author import (
     ConceptDeclarationStatement,
     CopyStatement,
+    FunctionDeclaration,
     ImportStatement,
     MergeStatementV2,
     MultiSelectStatement,
@@ -980,6 +981,7 @@ class BaseDialect:
                    ImportStatement,
                    RowsetDerivationStatement,
                    Datasource,
+                   FunctionDeclaration,
                ),
            ):
                continue
trilogy/hooks/graph_hook.py (new file)
@@ -0,0 +1,140 @@
+import sys
+from os import environ
+
+import networkx as nx
+
+from trilogy.hooks.base_hook import BaseHook
+
+if not environ.get("TCL_LIBRARY"):
+    minor = sys.version_info.minor
+    if minor == 13:
+        environ["TCL_LIBRARY"] = r"C:\Program Files\Python313\tcl\tcl8.6"
+    elif minor == 12:
+        environ["TCL_LIBRARY"] = r"C:\Program Files\Python312\tcl\tcl8.6"
+    else:
+        pass
+
+
+class GraphHook(BaseHook):
+    def __init__(self):
+        super().__init__()
+        try:
+            pass
+        except ImportError:
+            raise ImportError("GraphHook requires matplotlib and scipy to be installed")
+        # https://github.com/python/cpython/issues/125235#issuecomment-2412948604
+
+    def query_graph_built(
+        self,
+        graph: nx.DiGraph,
+        target: str | None = None,
+        highlight_nodes: list[str] | None = None,
+        remove_isolates: bool = True,
+    ):
+        from matplotlib import pyplot as plt
+
+        graph = graph.copy()
+        nodes = [*graph.nodes]
+        for node in nodes:
+            if "__preql_internal" in node:
+                graph.remove_node(node)
+
+        if remove_isolates:
+            graph.remove_nodes_from(list(nx.isolates(graph)))
+
+        color_map = []
+        highlight_nodes = highlight_nodes or []
+        for node in graph:
+            if node in highlight_nodes:
+                color_map.append("orange")
+            elif str(node).startswith("ds"):
+                color_map.append("blue")
+            else:
+                color_map.append("green")
+
+        pos = nx.spring_layout(graph)
+        kwargs = {}
+
+        if target:
+            edge_colors = []
+            descendents = nx.descendants(graph, target)
+            for edge in graph.edges():
+                if edge[0] == target:
+                    edge_colors.append("blue")
+                elif edge[1] == target:
+                    edge_colors.append("blue")
+                elif edge[1] in descendents:
+                    edge_colors.append("green")
+                else:
+                    edge_colors.append("black")
+            kwargs["edge_color"] = edge_colors
+
+        # Draw the graph without labels first
+        nx.draw(
+            graph,
+            pos=pos,
+            node_color=color_map,
+            connectionstyle="arc3, rad = 0.1",
+            with_labels=False,  # Important: don't draw labels with nx.draw
+            **kwargs
+        )
+
+        # Draw labels with manual spacing
+        self._draw_labels_with_manual_spacing(graph, pos)
+
+        plt.show()
+
+    def _draw_labels_with_manual_spacing(self, graph, pos):
+        """Fallback method for manual label spacing when adjustText is not available"""
+        import numpy as np
+
+        pos_labels = {}
+        node_positions = list(pos.values())
+
+        # Calculate average distance between nodes to determine spacing
+        if len(node_positions) > 1:
+            distances = []
+            for i, (x1, y1) in enumerate(node_positions):
+                for j, (x2, y2) in enumerate(node_positions[i + 1 :], i + 1):
+                    dist = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
+                    distances.append(dist)
+
+            avg_distance = np.mean(distances)
+            min_spacing = max(
+                0.1, avg_distance * 0.3
+            )  # Minimum spacing as fraction of average distance
+        else:
+            min_spacing = 0.1
+
+        # Simple spacing algorithm - offset labels that are too close
+        for i, node in enumerate(graph.nodes()):
+            x, y = pos[node]
+
+            # Check for nearby labels and adjust position
+            adjusted_x, adjusted_y = x, y
+            for j, other_node in enumerate(
+                list(graph.nodes())[:i]
+            ):  # Only check previous nodes
+                other_x, other_y = pos_labels.get(other_node, pos[other_node])
+                distance = np.sqrt(
+                    (adjusted_x - other_x) ** 2 + (adjusted_y - other_y) ** 2
+                )
+
+                if distance < min_spacing:
+                    # Calculate offset direction
+                    if distance > 0:
+                        offset_x = (adjusted_x - other_x) / distance * min_spacing
+                        offset_y = (adjusted_y - other_y) / distance * min_spacing
+                    else:
+                        # If nodes are at exact same position, use random offset
+                        angle = np.random.random() * 2 * np.pi
+                        offset_x = np.cos(angle) * min_spacing
+                        offset_y = np.sin(angle) * min_spacing
+
+                    adjusted_x = other_x + offset_x
+                    adjusted_y = other_y + offset_y
+
+            pos_labels[node] = (adjusted_x, adjusted_y)
+
+        # Draw the labels at adjusted positions
+        nx.draw_networkx_labels(graph, pos=pos_labels, font_size=10)
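
A minimal way to exercise the rewritten hook directly, assuming matplotlib and scipy are installed as the constructor's error message indicates (the node names below are invented, not real query-graph nodes):

# Illustrative only: drive the hook by hand with a tiny graph.
import networkx as nx
from trilogy.hooks.graph_hook import GraphHook

g = nx.DiGraph()
g.add_edge("ds~orders", "c~orders.id")
g.add_edge("c~orders.id", "c~orders.total")

GraphHook().query_graph_built(g, target="c~orders.id", highlight_nodes=["c~orders.total"])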
trilogy/parsing/common.py
@@ -86,7 +86,7 @@ def process_function_arg(
         if concept.metadata and meta:
             concept.metadata.line_number = meta.line
         environment.add_concept(concept, meta=meta)
-        return concept
+        return concept.reference
     elif isinstance(
         arg,
         (ListWrapper, MapWrapper),
@@ -103,7 +103,7 @@ def process_function_arg(
         if concept.metadata and meta:
             concept.metadata.line_number = meta.line
         environment.add_concept(concept, meta=meta)
-        return concept
+        return concept.reference
     elif isinstance(arg, Concept):
         return arg.reference
     elif isinstance(arg, ConceptRef):
trilogy/parsing/render.py
@@ -506,7 +506,11 @@ class Renderer:
            return f"{args[0]} % {args[1]}"
        if arg.operator == FunctionType.PARENTHETICAL:
            return f"({args[0]})"
-
+        if arg.operator == FunctionType.GROUP:
+            arg_string = ", ".join(args[1:])
+            if len(args) == 1:
+                return f"group({args[0]})"
+            return f"group({args[0]}) by {arg_string}"
        inputs = ",".join(args)

        if arg.operator == FunctionType.CONSTANT:
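
The new GROUP branch above renders group expressions back to Trilogy syntax. A small sketch of the output it produces, with plain strings standing in for the already-rendered arguments:

# Illustrative only: mirrors the added branch.
def render_group(args):
    if len(args) == 1:
        return f"group({args[0]})"
    return f"group({args[0]}) by {', '.join(args[1:])}"

print(render_group(["revenue"]))                    # group(revenue)
print(render_group(["revenue", "region", "year"]))  # group(revenue) by region, year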