pytrilogy 0.0.3.105__tar.gz → 0.0.3.107__tar.gz

This diff shows the changes between two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.

Files changed (160)
  1. {pytrilogy-0.0.3.105/pytrilogy.egg-info → pytrilogy-0.0.3.107}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/__init__.py +1 -1
  4. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/concept_strategies_v3.py +12 -13
  5. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/discovery_utility.py +22 -3
  6. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/group_node.py +1 -0
  7. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/select_merge_node.py +66 -0
  8. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/window_node.py +7 -5
  9. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/group_node.py +7 -0
  10. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/utility.py +145 -58
  11. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/render.py +16 -1
  12. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/LICENSE.md +0 -0
  13. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/README.md +0 -0
  14. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pyproject.toml +0 -0
  15. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pytrilogy.egg-info/SOURCES.txt +0 -0
  16. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pytrilogy.egg-info/dependency_links.txt +0 -0
  17. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pytrilogy.egg-info/entry_points.txt +0 -0
  18. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pytrilogy.egg-info/requires.txt +0 -0
  19. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/pytrilogy.egg-info/top_level.txt +0 -0
  20. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/setup.cfg +0 -0
  21. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/setup.py +0 -0
  22. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_datatypes.py +0 -0
  23. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_declarations.py +0 -0
  24. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_derived_concepts.py +0 -0
  25. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_discovery_nodes.py +0 -0
  26. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_enums.py +0 -0
  27. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_environment.py +0 -0
  28. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_execute_models.py +0 -0
  29. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_executor.py +0 -0
  30. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_failure.py +0 -0
  31. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_functions.py +0 -0
  32. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_imports.py +0 -0
  33. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_metadata.py +0 -0
  34. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_models.py +0 -0
  35. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_multi_join_assignments.py +0 -0
  36. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_parse_engine.py +0 -0
  37. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_parsing.py +0 -0
  38. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_parsing_failures.py +0 -0
  39. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_partial_handling.py +0 -0
  40. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_query_processing.py +0 -0
  41. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_query_render.py +0 -0
  42. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_select.py +0 -0
  43. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_show.py +0 -0
  44. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_statements.py +0 -0
  45. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_typing.py +0 -0
  46. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_undefined_concept.py +0 -0
  47. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_user_functions.py +0 -0
  48. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_validators.py +0 -0
  49. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/tests/test_where_clause.py +0 -0
  50. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/authoring/__init__.py +0 -0
  51. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/constants.py +0 -0
  52. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/__init__.py +0 -0
  53. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/constants.py +0 -0
  54. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/enums.py +0 -0
  55. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/env_processor.py +0 -0
  56. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/environment_helpers.py +0 -0
  57. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/ergonomics.py +0 -0
  58. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/exceptions.py +0 -0
  59. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/functions.py +0 -0
  60. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/graph_models.py +0 -0
  61. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/internal.py +0 -0
  62. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/__init__.py +0 -0
  63. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/author.py +0 -0
  64. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/build.py +0 -0
  65. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/build_environment.py +0 -0
  66. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/core.py +0 -0
  67. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/datasource.py +0 -0
  68. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/environment.py +0 -0
  69. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/models/execute.py +0 -0
  70. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimization.py +0 -0
  71. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimizations/__init__.py +0 -0
  72. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimizations/base_optimization.py +0 -0
  73. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimizations/hide_unused_concept.py +0 -0
  74. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimizations/inline_datasource.py +0 -0
  75. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  76. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/__init__.py +0 -0
  77. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/discovery_node_factory.py +0 -0
  78. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/discovery_validation.py +0 -0
  79. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/graph_utils.py +0 -0
  80. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/__init__.py +0 -0
  81. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  82. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/common.py +0 -0
  83. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/constant_node.py +0 -0
  84. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  85. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  86. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  87. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
  88. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  89. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  90. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  91. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  92. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/select_node.py +0 -0
  93. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
  94. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/union_node.py +0 -0
  95. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  96. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/__init__.py +0 -0
  97. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/base_node.py +0 -0
  98. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/filter_node.py +0 -0
  99. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/merge_node.py +0 -0
  100. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  101. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  102. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/union_node.py +0 -0
  103. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  104. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/processing/nodes/window_node.py +0 -0
  105. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/query_processor.py +0 -0
  106. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/statements/__init__.py +0 -0
  107. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/statements/author.py +0 -0
  108. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/statements/build.py +0 -0
  109. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/statements/common.py +0 -0
  110. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/statements/execute.py +0 -0
  111. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/utility.py +0 -0
  112. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/__init__.py +0 -0
  113. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/common.py +0 -0
  114. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/concept.py +0 -0
  115. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/datasource.py +0 -0
  116. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/environment.py +0 -0
  117. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/core/validation/fix.py +0 -0
  118. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/__init__.py +0 -0
  119. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/base.py +0 -0
  120. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/bigquery.py +0 -0
  121. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/common.py +0 -0
  122. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/config.py +0 -0
  123. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/dataframe.py +0 -0
  124. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/duckdb.py +0 -0
  125. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/enums.py +0 -0
  126. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/metadata.py +0 -0
  127. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/postgres.py +0 -0
  128. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/presto.py +0 -0
  129. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/snowflake.py +0 -0
  130. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/dialect/sql_server.py +0 -0
  131. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/engine.py +0 -0
  132. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/executor.py +0 -0
  133. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/hooks/__init__.py +0 -0
  134. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/hooks/base_hook.py +0 -0
  135. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/hooks/graph_hook.py +0 -0
  136. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/hooks/query_debugger.py +0 -0
  137. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/metadata/__init__.py +0 -0
  138. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parser.py +0 -0
  139. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/__init__.py +0 -0
  140. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/common.py +0 -0
  141. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/config.py +0 -0
  142. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/exceptions.py +0 -0
  143. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/helpers.py +0 -0
  144. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/parse_engine.py +0 -0
  145. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/parsing/trilogy.lark +0 -0
  146. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/py.typed +0 -0
  147. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/render.py +0 -0
  148. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/scripts/__init__.py +0 -0
  149. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/scripts/trilogy.py +0 -0
  150. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/__init__.py +0 -0
  151. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/color.preql +0 -0
  152. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/date.preql +0 -0
  153. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/display.preql +0 -0
  154. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/geography.preql +0 -0
  155. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/metric.preql +0 -0
  156. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/money.preql +0 -0
  157. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/net.preql +0 -0
  158. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/ranking.preql +0 -0
  159. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/std/report.preql +0 -0
  160. {pytrilogy-0.0.3.105 → pytrilogy-0.0.3.107}/trilogy/utility.py +0 -0
--- pytrilogy-0.0.3.105/pytrilogy.egg-info/PKG-INFO
+++ pytrilogy-0.0.3.107/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.105
+Version: 0.0.3.107
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
--- pytrilogy-0.0.3.105/PKG-INFO
+++ pytrilogy-0.0.3.107/pytrilogy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.105
+Version: 0.0.3.107
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
--- pytrilogy-0.0.3.105/trilogy/__init__.py
+++ pytrilogy-0.0.3.107/trilogy/__init__.py
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
 from trilogy.executor import Executor
 from trilogy.parser import parse
 
-__version__ = "0.0.3.105"
+__version__ = "0.0.3.107"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
--- pytrilogy-0.0.3.105/trilogy/core/processing/concept_strategies_v3.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/concept_strategies_v3.py
@@ -350,9 +350,6 @@ def check_for_early_exit(
 def generate_loop_completion(context: LoopContext, virtual: set[str]) -> StrategyNode:
     condition_required = True
     non_virtual = [c for c in context.completion_mandatory if c.address not in virtual]
-    non_virtual_output = [
-        c for c in context.original_mandatory if c.address not in virtual
-    ]
     non_virtual_different = len(context.completion_mandatory) != len(
         context.original_mandatory
     )
@@ -380,11 +377,12 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
         logger.info(
             f"Condition {context.conditions} not required, parents included filtering! {parent_map}"
         )
+
     if len(context.stack) == 1:
         output: StrategyNode = context.stack[0]
         if non_virtual_different:
             logger.info(
-                f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values by setting outputs to {[x.address for x in output.output_concepts if x.address in non_virtual_output]}"
+                f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found added non-virtual output concepts ({non_virtual_difference_values})"
             )
             # output.set_output_concepts(
             #     [
@@ -398,13 +396,6 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
             # )
             # output.set_output_concepts(context.original_mandatory)
 
-        # if isinstance(output, MergeNode):
-        #     output.force_group = True
-        #     output.rebuild_cache()
-
-        logger.info(
-            f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Source stack has single node, returning that {type(output)}"
-        )
     else:
         logger.info(
             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} wrapping multiple parent nodes {[type(x) for x in context.stack]} in merge node"
@@ -441,10 +432,18 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
             f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
         )
         return group_if_required_v2(
-            output, context.original_mandatory, context.environment
+            output,
+            context.original_mandatory,
+            context.environment,
+            non_virtual_difference_values,
         )
 
-    return group_if_required_v2(output, context.original_mandatory, context.environment)
+    return group_if_required_v2(
+        output,
+        context.original_mandatory,
+        context.environment,
+        non_virtual_difference_values,
+    )
 
 
 def _search_concepts(
--- pytrilogy-0.0.3.105/trilogy/core/processing/discovery_utility.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/discovery_utility.py
@@ -180,8 +180,12 @@ def check_if_group_required(
 
 
 def group_if_required_v2(
-    root: StrategyNode, final: List[BuildConcept], environment: BuildEnvironment
+    root: StrategyNode,
+    final: List[BuildConcept],
+    environment: BuildEnvironment,
+    where_injected: set[str] | None = None,
 ):
+    where_injected = where_injected or set()
     required = check_if_group_required(
         downstream_concepts=final, parents=[root.resolve()], environment=environment
     )
@@ -197,8 +201,23 @@ def group_if_required_v2(
         root.rebuild_cache()
         return root
     elif isinstance(root, GroupNode):
-        # root.set_output_concepts(final, rebuild=False)
-        # root.rebuild_cache()
+
+        if set(x.address for x in final) != set(
+            x.address for x in root.output_concepts
+        ):
+            allowed_outputs = [
+                x
+                for x in root.output_concepts
+                if not (
+                    x.address in where_injected
+                    and x.address not in (root.required_outputs or set())
+                )
+            ]
+
+            logger.info(
+                f"Adjusting group node outputs to remove injected concepts {where_injected}: remaining {allowed_outputs}"
+            )
+            root.set_output_concepts(allowed_outputs)
         return root
     return GroupNode(
         output_concepts=targets,
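
Taken together, the concept_strategies_v3.py and discovery_utility.py hunks change group_if_required_v2 to accept the set of where-injected concept addresses and drop them from an existing GroupNode's outputs, unless a concept appears in the node's required_outputs (the concepts needed to preserve grain, populated by the group-node generator below). A minimal, self-contained sketch of that pruning rule; the class is an illustrative stand-in, not trilogy's GroupNode:

from dataclasses import dataclass, field

@dataclass
class FakeGroupNode:  # illustrative stand-in, not trilogy's GroupNode
    output_concepts: list[str]
    required_outputs: set[str] = field(default_factory=set)

def prune_injected(node: FakeGroupNode, where_injected: set[str]) -> list[str]:
    # keep a concept unless it was injected by a where clause AND is not
    # required to preserve the grain of the aggregate
    return [
        c
        for c in node.output_concepts
        if not (c in where_injected and c not in node.required_outputs)
    ]

node = FakeGroupNode(["order.id", "order.status"], required_outputs={"order.id"})
# order.id was injected but is grain-preserving, so it survives; order.status is dropped
assert prune_injected(node, {"order.status", "order.id"}) == ["order.id"]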
--- pytrilogy-0.0.3.105/trilogy/core/processing/node_generators/group_node.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/node_generators/group_node.py
@@ -176,6 +176,7 @@ def gen_group_node(
         parents=parents,
         depth=depth,
         preexisting_conditions=conditions.conditional if conditions else None,
+        required_outputs=parent_concepts,
     )
 
     # early exit if no optional
--- pytrilogy-0.0.3.105/trilogy/core/processing/node_generators/select_merge_node.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/node_generators/select_merge_node.py
@@ -224,6 +224,72 @@ def create_pruned_concept_graph(
     return g
 
 
+# def deduplicate_nodes(subgraph: nx.DiGraph, nodes: list[str], partial_map: dict[str, list[str]], depth: int) -> list[str]:
+#     """
+#     Remove duplicate datasource nodes that are connected to the same concepts
+#     and have the same partial state, keeping the one with the most unique concepts.
+
+#     Args:
+#         subgraph: NetworkX DiGraph containing the nodes and edges
+#         nodes: List of node names to deduplicate
+#         partial_map: Map of datasource to partial nodes
+
+#     Returns:
+#         List of deduplicated node names
+#     """
+#     # Filter for datasource nodes only
+#     ds_nodes = [node for node in nodes if node.startswith("ds~")]
+#     non_ds_nodes = [node for node in nodes if not node.startswith("ds~")]
+
+#     if len(ds_nodes) <= 1:
+#         return nodes  # No deduplication needed
+
+#     # Build a map of each datasource to its connected concepts and partial state
+#     ds_info = {}
+
+#     for ds_node in ds_nodes:
+#         # Get connected concept nodes (nodes starting with "c~")
+#         connected_concepts = set()
+#         for neighbor in subgraph.neighbors(ds_node):
+#             if neighbor.startswith("c~"):
+#                 connected_concepts.add(neighbor)
+
+#         # Get partial state for this datasource
+#         partial_state = tuple(sorted(partial_map.get(ds_node, [])))
+
+#         ds_info[ds_node] = {
+#             'concepts': connected_concepts,
+#             'partial_state': partial_state
+#         }
+
+#     # Find datasources to remove (those that are subsets of others)
+#     nodes_to_remove = set()
+#     logger.info('LOOK HERE')
+#     logger.info(ds_info)
+#     for ds_a, info_a in ds_info.items():
+#         for ds_b, info_b in ds_info.items():
+#             if ds_a != ds_b and ds_a not in nodes_to_remove:
+#                 # Check if ds_a is a subset of ds_b (same partial state and concepts are subset)
+#                 if (info_a['partial_state'] == info_b['partial_state'] and
+#                     info_a['concepts'].issubset(info_b['concepts']) and
+#                     len(info_a['concepts']) < len(info_b['concepts'])):
+#                     # ds_a connects to fewer concepts than ds_b, so remove ds_a
+#                     nodes_to_remove.add(ds_a)
+#                 elif (info_a['partial_state'] == info_b['partial_state'] and
+#                       info_a['concepts'] == info_b['concepts']):
+#                     # Exact same concepts and partial state - keep one arbitrarily
+#                     # (keep the lexicographically smaller one for consistency)
+#                     if ds_a > ds_b:
+#                         nodes_to_remove.add(ds_a)
+
+#     # Keep datasource nodes that weren't marked for removal
+#     logger.info(f"{padding(depth)}{LOGGER_PREFIX} Removing duplicate datasource nodes: {nodes_to_remove}")
+#     deduplicated_ds_nodes = [ds for ds in ds_nodes if ds not in nodes_to_remove]
+
+#     # Return deduplicated datasource nodes plus all non-datasource nodes
+#     return deduplicated_ds_nodes + non_ds_nodes
+
+
 def resolve_subgraphs(
     g: ReferenceGraph,
     relevant: list[BuildConcept],
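
The deduplicate_nodes helper above ships commented out. For reference, the subset rule it describes — drop a datasource when another datasource with the same partial state covers a strict superset of its concepts — can be sketched in a few lines (hypothetical helper, not part of the package; unlike the disabled code, this version keeps all members of an exact tie):

def dedupe_by_subset(
    ds_info: dict[str, tuple[frozenset[str], tuple[str, ...]]],
) -> list[str]:
    # ds_info maps datasource name -> (connected concepts, partial state)
    keep = []
    for ds, (concepts, partial) in ds_info.items():
        dominated = any(
            other != ds
            and other_partial == partial
            and concepts < other_concepts  # strict subset of concepts
            for other, (other_concepts, other_partial) in ds_info.items()
        )
        if not dominated:
            keep.append(ds)
    return keep

info = {
    "ds~a": (frozenset({"c~x"}), ()),
    "ds~b": (frozenset({"c~x", "c~y"}), ()),
}
assert dedupe_by_subset(info) == ["ds~b"]  # ds~a is covered by ds~b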
--- pytrilogy-0.0.3.105/trilogy/core/processing/node_generators/window_node.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/node_generators/window_node.py
@@ -27,7 +27,7 @@ WINDOW_TYPES = (BuildWindowItem,)
 
 
 def resolve_window_parent_concepts(
-    concept: BuildConcept, environment: BuildEnvironment
+    concept: BuildConcept, environment: BuildEnvironment, depth: int
 ) -> tuple[BuildConcept, List[BuildConcept]]:
     if not isinstance(concept.lineage, WINDOW_TYPES):
         raise ValueError
@@ -39,7 +39,9 @@ def resolve_window_parent_concepts(
         base += item.concept_arguments
     if concept.grain:
         for gitem in concept.grain.components:
-            logger.info(f"{LOGGER_PREFIX} appending grain item {gitem} to base")
+            logger.info(
+                f"{padding(depth)}{LOGGER_PREFIX} appending grain item {gitem} to base"
+            )
             base.append(environment.concepts[gitem])
     return concept.lineage.content, unique(base, "address")
 
@@ -54,7 +56,7 @@
     history: History,
     conditions: BuildWhereClause | None = None,
 ) -> StrategyNode | None:
-    base, parent_concepts = resolve_window_parent_concepts(concept, environment)
+    base, parent_concepts = resolve_window_parent_concepts(concept, environment, depth)
     logger.info(
         f"{padding(depth)}{LOGGER_PREFIX} generating window node for {concept} with parents {[x.address for x in parent_concepts]} and optional {local_optional}"
     )
@@ -62,7 +64,7 @@
         x
         for x in local_optional
         if isinstance(x.lineage, WINDOW_TYPES)
-        and resolve_window_parent_concepts(x, environment)[1] == parent_concepts
+        and resolve_window_parent_concepts(x, environment, depth)[1] == parent_concepts
     ]
 
     targets = [base]
@@ -79,7 +81,7 @@
     if equivalent_optional:
         for x in equivalent_optional:
             assert isinstance(x.lineage, WINDOW_TYPES)
-            base, parents = resolve_window_parent_concepts(x, environment)
+            base, parents = resolve_window_parent_concepts(x, environment, depth)
             logger.info(
                 f"{padding(depth)}{LOGGER_PREFIX} found equivalent optional {x} with parents {parents}"
             )
--- pytrilogy-0.0.3.105/trilogy/core/processing/nodes/group_node.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/nodes/group_node.py
@@ -49,6 +49,7 @@ class GroupNode(StrategyNode):
         existence_concepts: List[BuildConcept] | None = None,
         hidden_concepts: set[str] | None = None,
         ordering: BuildOrderBy | None = None,
+        required_outputs: List[BuildConcept] | None = None,
     ):
         super().__init__(
             input_concepts=input_concepts,
@@ -66,6 +67,9 @@
             hidden_concepts=hidden_concepts,
             ordering=ordering,
         )
+        # the set of concepts required to preserve grain
+        # set by group by node generation with aggregates
+        self.required_outputs = required_outputs
 
     @classmethod
     def check_if_required(
@@ -184,4 +188,7 @@
             existence_concepts=list(self.existence_concepts),
             hidden_concepts=set(self.hidden_concepts),
             ordering=self.ordering,
+            required_outputs=(
+                list(self.required_outputs) if self.required_outputs else None
+            ),
         )
--- pytrilogy-0.0.3.105/trilogy/core/processing/utility.py
+++ pytrilogy-0.0.3.107/trilogy/core/processing/utility.py
@@ -90,13 +90,86 @@ class GroupRequiredResponse:
     required: bool
 
 
+def find_all_connecting_concepts(g: nx.Graph, ds1: str, ds2: str) -> set[str]:
+    """Find all concepts that connect two datasources"""
+    concepts1 = set(g.neighbors(ds1))
+    concepts2 = set(g.neighbors(ds2))
+    return concepts1 & concepts2
+
+
+def get_connection_keys(
+    all_connections: dict[tuple[str, str], set[str]], left: str, right: str
+) -> set[str]:
+    """Get all concepts that connect two datasources"""
+    lookup = sorted([left, right])
+    key: tuple[str, str] = (lookup[0], lookup[1])
+    return all_connections.get(key, set())
+
+
+def get_join_type(
+    left: str,
+    right: str,
+    partials: dict[str, list[str]],
+    nullables: dict[str, list[str]],
+    all_connecting_keys: set[str],
+) -> JoinType:
+    left_is_partial = any(key in partials.get(left, []) for key in all_connecting_keys)
+    left_is_nullable = any(
+        key in nullables.get(left, []) for key in all_connecting_keys
+    )
+    right_is_partial = any(
+        key in partials.get(right, []) for key in all_connecting_keys
+    )
+    right_is_nullable = any(
+        key in nullables.get(right, []) for key in all_connecting_keys
+    )
+
+    if left_is_nullable and right_is_nullable:
+        join_type = JoinType.FULL
+    elif left_is_partial and right_is_partial:
+        join_type = JoinType.FULL
+    elif left_is_partial:
+        join_type = JoinType.FULL
+    elif right_is_nullable:
+        join_type = JoinType.RIGHT_OUTER
+    elif right_is_partial or left_is_nullable:
+        join_type = JoinType.LEFT_OUTER
+    # we can't inner join if the left was an outer join
+    else:
+        join_type = JoinType.INNER
+    return join_type
+
+
+def reduce_join_types(join_types: Set[JoinType]) -> JoinType:
+    final_join_type = JoinType.INNER
+    if any([x == JoinType.FULL for x in join_types]):
+        final_join_type = JoinType.FULL
+    elif any([x == JoinType.LEFT_OUTER for x in join_types]):
+        final_join_type = JoinType.LEFT_OUTER
+    elif any([x == JoinType.RIGHT_OUTER for x in join_types]):
+        final_join_type = JoinType.RIGHT_OUTER
+
+    return final_join_type
+
+
 def resolve_join_order_v2(
     g: nx.Graph, partials: dict[str, list[str]], nullables: dict[str, list[str]]
 ) -> list[JoinOrderOutput]:
     datasources = [x for x in g.nodes if x.startswith("ds~")]
     concepts = [x for x in g.nodes if x.startswith("c~")]
 
+    # Pre-compute all possible connections between datasources
+    all_connections: dict[tuple[str, str], set[str]] = {}
+    for i, ds1 in enumerate(datasources):
+        for ds2 in datasources[i + 1 :]:
+            connecting_concepts = find_all_connecting_concepts(g, ds1, ds2)
+            if connecting_concepts:
+                key = tuple(sorted([ds1, ds2]))
+                all_connections[key] = connecting_concepts
+
     output: list[JoinOrderOutput] = []
+
+    # create our map of pivots, or common join concepts
     pivot_map = {
         concept: [x for x in g.neighbors(concept) if x in datasources]
         for concept in concepts
@@ -108,8 +181,9 @@ def resolve_join_order_v2(
         )
     )
     solo = [x for x in pivot_map if len(pivot_map[x]) == 1]
-    eligible_left = set()
+    eligible_left: set[str] = set()
 
+    # while we have pivots, keep joining them in
    while pivots:
        next_pivots = [
            x for x in pivots if any(y in eligible_left for y in pivot_map[x])
@@ -120,7 +194,7 @@
         else:
             root = pivots.pop(0)
 
-        # sort so less partials is last and eligible lefts are
+        # sort so less partials is last and eligible lefts are first
         def score_key(x: str) -> tuple[int, int, str]:
             base = 1
             # if it's left, higher weight
@@ -133,79 +207,56 @@
                 base -= 1
             return (base, len(x), x)
 
-        # get remainig un-joined datasets
+        # get remaining un-joined datasets
         to_join = sorted(
             [x for x in pivot_map[root] if x not in eligible_left], key=score_key
         )
         while to_join:
             # need to sort this to ensure we join on the best match
-            base = sorted(
-                [x for x in pivot_map[root] if x in eligible_left], key=score_key
-            )
+            # but check ALL left in case there are non-pivt keys to join on
+            base = sorted([x for x in eligible_left], key=score_key)
             if not base:
                 new = to_join.pop()
                 eligible_left.add(new)
                 base = [new]
             right = to_join.pop()
             # we already joined it
-            # this could happen if the same pivot is shared with multiple Dses
+            # this could happen if the same pivot is shared with multiple DSes
             if right in eligible_left:
                 continue
+
             joinkeys: dict[str, set[str]] = {}
             # sorting puts the best candidate last for pop
             # so iterate over the reversed list
             join_types = set()
+
             for left_candidate in reversed(base):
-                common = nx.common_neighbors(g, left_candidate, right)
+                # Get all concepts that connect these two datasources
+                all_connecting_keys = get_connection_keys(
+                    all_connections, left_candidate, right
+                )
 
-                if not common:
+                if not all_connecting_keys:
                     continue
+
+                # Check if we already have this exact set of keys
                 exists = False
                 for _, v in joinkeys.items():
-                    if v == common:
+                    if v == all_connecting_keys:
                         exists = True
                 if exists:
                     continue
-                left_is_partial = any(
-                    key in partials.get(left_candidate, []) for key in common
-                )
-                left_is_nullable = any(
-                    key in nullables.get(left_candidate, []) for key in common
-                )
-                right_is_partial = any(key in partials.get(right, []) for key in common)
-                # we don't care if left is nullable for join type (just keys), but if we did
-                # left_is_nullable = any(
-                #     key in nullables.get(left_candidate, []) for key in common
-                # )
-                right_is_nullable = any(
-                    key in nullables.get(right, []) for key in common
-                )
-                if left_is_nullable and right_is_nullable:
-                    join_type = JoinType.FULL
-                elif left_is_partial and right_is_partial:
-                    join_type = JoinType.FULL
-                elif left_is_partial:
-                    join_type = JoinType.FULL
-                elif right_is_nullable:
-                    join_type = JoinType.RIGHT_OUTER
-                elif right_is_partial or left_is_nullable:
-                    join_type = JoinType.LEFT_OUTER
-                # we can't inner join if the left was an outer join
-                else:
-                    join_type = JoinType.INNER
 
+                join_type = get_join_type(
+                    left_candidate, right, partials, nullables, all_connecting_keys
+                )
                 join_types.add(join_type)
-                joinkeys[left_candidate] = common
-            final_join_type = JoinType.INNER
-            if any([x == JoinType.FULL for x in join_types]):
-                final_join_type = JoinType.FULL
-            elif any([x == JoinType.LEFT_OUTER for x in join_types]):
-                final_join_type = JoinType.LEFT_OUTER
-            elif any([x == JoinType.RIGHT_OUTER for x in join_types]):
-                final_join_type = JoinType.RIGHT_OUTER
+                joinkeys[left_candidate] = all_connecting_keys
+
+            final_join_type = reduce_join_types(join_types)
+
             output.append(
                 JoinOrderOutput(
-                    # left=left_candidate,
                     right=right,
                     type=final_join_type,
                     keys=joinkeys,
@@ -216,7 +267,6 @@
     for concept in solo:
         for ds in pivot_map[concept]:
             # if we already have it, skip it
-
             if ds in eligible_left:
                 continue
             # if we haven't had ANY left datasources yet
@@ -224,17 +274,39 @@
             if not eligible_left:
                 eligible_left.add(ds)
                 continue
-            # otherwise do a full out join
-            output.append(
-                JoinOrderOutput(
-                    # pick random one to be left
-                    left=list(eligible_left)[0],
-                    right=ds,
-                    type=JoinType.FULL,
-                    keys={},
+            # otherwise do a full outer join
+            # Try to find if there are any connecting keys with existing left tables
+            best_left = None
+            best_keys: set[str] = set()
+            for existing_left in eligible_left:
+                connecting_keys = get_connection_keys(
+                    all_connections, existing_left, ds
+                )
+                if connecting_keys and len(connecting_keys) > len(best_keys):
+                    best_left = existing_left
+                    best_keys = connecting_keys
+
+            if best_left and best_keys:
+                output.append(
+                    JoinOrderOutput(
+                        left=best_left,
+                        right=ds,
+                        type=JoinType.FULL,
+                        keys={best_left: best_keys},
+                    )
+                )
+            else:
+                output.append(
+                    JoinOrderOutput(
+                        # pick random one to be left
+                        left=list(eligible_left)[0],
+                        right=ds,
+                        type=JoinType.FULL,
+                        keys={},
+                    )
                 )
-            )
             eligible_left.add(ds)
+
     # only once we have all joins
     # do we know if some inners need to be left outers
     for review_join in output:
@@ -248,6 +320,7 @@
                 ]
             ):
                 review_join.type = JoinType.LEFT_OUTER
+
     return output
 
 
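The bulk of the utility.py rewrite factors the inline join-type logic out into get_join_type and reduce_join_types, and precomputes every pairwise set of connecting concepts up front instead of calling nx.common_neighbors inside the loop. The reduction rule is simple precedence: the widest join wins. A runnable sketch with a local stand-in enum (not trilogy's JoinType):

from enum import Enum

class JoinType(Enum):  # local stand-in for trilogy's JoinType
    INNER = "inner"
    LEFT_OUTER = "left outer"
    RIGHT_OUTER = "right outer"
    FULL = "full"

def reduce_join_types(join_types: set[JoinType]) -> JoinType:
    # precedence mirrors the reduce_join_types helper factored out above:
    # FULL > LEFT_OUTER > RIGHT_OUTER > INNER
    for candidate in (JoinType.FULL, JoinType.LEFT_OUTER, JoinType.RIGHT_OUTER):
        if candidate in join_types:
            return candidate
    return JoinType.INNER

assert reduce_join_types({JoinType.INNER, JoinType.RIGHT_OUTER}) is JoinType.RIGHT_OUTER
assert reduce_join_types(set()) is JoinType.INNER

The remaining hunks in trilogy/core/processing/utility.py, covering reduce_concept_pairs and get_node_joins, continue below.
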
@@ -352,7 +425,9 @@
     )
 
 
-def reduce_concept_pairs(input: list[ConceptPair]) -> list[ConceptPair]:
+def reduce_concept_pairs(
+    input: list[ConceptPair], right_source: QueryDatasource | BuildDatasource
+) -> list[ConceptPair]:
     left_keys = set()
     right_keys = set()
     for pair in input:
@@ -361,7 +436,10 @@ def reduce_concept_pairs(input: list[ConceptPair]) -> list[ConceptPair]:
         if pair.right.purpose == Purpose.KEY:
             right_keys.add(pair.right.address)
     final: list[ConceptPair] = []
+    seen_right_keys = set()
     for pair in input:
+        if pair.right.address in seen_right_keys:
+            continue
         if (
             pair.left.purpose == Purpose.PROPERTY
             and pair.left.keys
@@ -374,7 +452,15 @@
             and pair.right.keys.issubset(right_keys)
         ):
             continue
+
+        seen_right_keys.add(pair.right.address)
         final.append(pair)
+    all_keys = set([x.right.address for x in final])
+    if right_source.grain.components and right_source.grain.components.issubset(
+        all_keys
+    ):
+        return [x for x in final if x.right.address in right_source.grain.components]
+
     return final
 
 
@@ -443,7 +529,8 @@ def get_node_joins(
                 )
                 for k, v in j.keys.items()
                 for concept in v
-            ]
+            ],
+            ds_node_map[j.right],
         ),
     )
     for j in joins
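
The reduce_concept_pairs change deduplicates right-side join keys and adds a grain short-circuit: when every component of the right source's grain appears among the surviving join keys, the join is restricted to exactly those components. A toy illustration of the short-circuit on bare addresses (names are illustrative, not trilogy's API):

def restrict_to_grain(right_keys: list[str], grain: set[str]) -> list[str]:
    # if the grain is fully covered by the available keys,
    # joining on the grain alone is sufficient and cheaper
    if grain and grain.issubset(set(right_keys)):
        return [k for k in right_keys if k in grain]
    return right_keys

assert restrict_to_grain(["order.id", "order.customer_id"], {"order.id"}) == ["order.id"]
assert restrict_to_grain(["order.customer_id"], {"order.id"}) == ["order.customer_id"]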
--- pytrilogy-0.0.3.105/trilogy/parsing/render.py
+++ pytrilogy-0.0.3.107/trilogy/parsing/render.py
@@ -8,7 +8,14 @@ from typing import Any
 from jinja2 import Template
 
 from trilogy.constants import DEFAULT_NAMESPACE, VIRTUAL_CONCEPT_PREFIX, MagicConstants
-from trilogy.core.enums import ConceptSource, DatePart, FunctionType, Modifier, Purpose
+from trilogy.core.enums import (
+    ConceptSource,
+    DatePart,
+    FunctionType,
+    Modifier,
+    Purpose,
+    ValidationScope,
+)
 from trilogy.core.models.author import (
     AggregateWrapper,
     AlignClause,
@@ -66,6 +73,7 @@ from trilogy.core.statements.author import (
     SelectItem,
     SelectStatement,
     TypeDeclaration,
+    ValidateStatement,
 )
 
 QUERY_TEMPLATE = Template(
@@ -445,6 +453,13 @@ class Renderer:
         final = "".join(prefixes)
         return f"{final}{self.to_string(arg.content)}"
 
+    @to_string.register
+    def _(self, arg: ValidateStatement):
+        targets = ",".join(arg.targets) if arg.targets else "*"
+        if arg.scope.value == ValidationScope.ALL:
+            return "validate all;"
+        return f"validate {arg.scope.value} {targets};"
+
     @to_string.register
     def _(self, arg: SelectStatement):
         with self.indented():
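
The render.py hunks register one more case on the Renderer's to_string dispatcher so a ValidateStatement round-trips back to validate syntax. The register-by-annotation pattern visible here matches functools.singledispatchmethod; a minimal sketch of that pattern with a hypothetical statement type (not trilogy's classes):

from functools import singledispatchmethod

class ValidateAll:  # hypothetical statement type for illustration
    pass

class Renderer:
    @singledispatchmethod
    def to_string(self, arg) -> str:
        # fallback when no case is registered for the argument's type
        raise NotImplementedError(type(arg))

    @to_string.register
    def _(self, arg: ValidateAll) -> str:
        # dispatched by the annotation on arg
        return "validate all;"

assert Renderer().to_string(ValidateAll()) == "validate all;"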