pytrilogy 0.0.3.113__tar.gz → 0.0.3.115__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (173)
  1. {pytrilogy-0.0.3.113/pytrilogy.egg-info → pytrilogy-0.0.3.115}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_parse_engine.py +14 -0
  4. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/__init__.py +1 -1
  5. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/constants.py +28 -0
  6. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/enums.py +5 -1
  7. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/functions.py +25 -0
  8. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/author.py +3 -2
  9. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/build.py +17 -2
  10. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/concept_strategies_v3.py +24 -5
  11. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/discovery_node_factory.py +2 -2
  12. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/discovery_utility.py +11 -4
  13. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/filter_node.py +7 -0
  14. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/unnest_node.py +77 -6
  15. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/statements/author.py +1 -1
  16. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/base.py +12 -0
  17. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/common.py +8 -3
  18. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/parse_engine.py +57 -4
  19. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/render.py +2 -1
  20. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/trilogy.lark +11 -5
  21. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/LICENSE.md +0 -0
  22. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/README.md +0 -0
  23. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pyproject.toml +0 -0
  24. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pytrilogy.egg-info/SOURCES.txt +0 -0
  25. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pytrilogy.egg-info/dependency_links.txt +0 -0
  26. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pytrilogy.egg-info/entry_points.txt +0 -0
  27. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pytrilogy.egg-info/requires.txt +0 -0
  28. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/pytrilogy.egg-info/top_level.txt +0 -0
  29. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/requirements.txt +0 -0
  30. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/setup.cfg +0 -0
  31. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_datatypes.py +0 -0
  32. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_declarations.py +0 -0
  33. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_derived_concepts.py +0 -0
  34. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_discovery_nodes.py +0 -0
  35. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_enums.py +0 -0
  36. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_environment.py +0 -0
  37. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_execute_models.py +0 -0
  38. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_executor.py +0 -0
  39. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_failure.py +0 -0
  40. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_functions.py +0 -0
  41. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_imports.py +0 -0
  42. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_metadata.py +0 -0
  43. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_models.py +0 -0
  44. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_multi_join_assignments.py +0 -0
  45. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_parsing.py +0 -0
  46. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_parsing_failures.py +0 -0
  47. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_partial_handling.py +0 -0
  48. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_query_processing.py +0 -0
  49. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_query_render.py +0 -0
  50. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_select.py +0 -0
  51. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_show.py +0 -0
  52. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_statements.py +0 -0
  53. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_typing.py +0 -0
  54. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_undefined_concept.py +0 -0
  55. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_user_functions.py +0 -0
  56. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_validators.py +0 -0
  57. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/tests/test_where_clause.py +0 -0
  58. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/__init__.py +0 -0
  59. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/constants.py +0 -0
  60. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/conversation.py +0 -0
  61. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/enums.py +0 -0
  62. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/execute.py +0 -0
  63. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/models.py +0 -0
  64. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/prompts.py +0 -0
  65. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/__init__.py +0 -0
  66. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/anthropic.py +0 -0
  67. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/base.py +0 -0
  68. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/google.py +0 -0
  69. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/openai.py +0 -0
  70. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/ai/providers/utils.py +0 -0
  71. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/authoring/__init__.py +0 -0
  72. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/__init__.py +0 -0
  73. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/constants.py +0 -0
  74. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/env_processor.py +0 -0
  75. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/environment_helpers.py +0 -0
  76. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/ergonomics.py +0 -0
  77. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/exceptions.py +0 -0
  78. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/graph_models.py +0 -0
  79. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/internal.py +0 -0
  80. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/__init__.py +0 -0
  81. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/build_environment.py +0 -0
  82. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/core.py +0 -0
  83. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/datasource.py +0 -0
  84. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/environment.py +0 -0
  85. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/models/execute.py +0 -0
  86. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimization.py +0 -0
  87. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimizations/__init__.py +0 -0
  88. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimizations/base_optimization.py +0 -0
  89. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimizations/hide_unused_concept.py +0 -0
  90. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimizations/inline_datasource.py +0 -0
  91. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  92. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/__init__.py +0 -0
  93. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/discovery_validation.py +0 -0
  94. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/graph_utils.py +0 -0
  95. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/__init__.py +0 -0
  96. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  97. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/common.py +0 -0
  98. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/constant_node.py +0 -0
  99. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/group_node.py +0 -0
  100. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  101. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  102. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
  103. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  104. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  105. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  106. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  107. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
  108. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/select_node.py +0 -0
  109. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
  110. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/union_node.py +0 -0
  111. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/node_generators/window_node.py +0 -0
  112. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/__init__.py +0 -0
  113. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/base_node.py +0 -0
  114. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/filter_node.py +0 -0
  115. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/group_node.py +0 -0
  116. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/merge_node.py +0 -0
  117. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  118. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  119. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/union_node.py +0 -0
  120. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  121. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/nodes/window_node.py +0 -0
  122. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/processing/utility.py +0 -0
  123. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/query_processor.py +0 -0
  124. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/statements/__init__.py +0 -0
  125. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/statements/build.py +0 -0
  126. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/statements/common.py +0 -0
  127. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/statements/execute.py +0 -0
  128. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/utility.py +0 -0
  129. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/__init__.py +0 -0
  130. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/common.py +0 -0
  131. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/concept.py +0 -0
  132. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/datasource.py +0 -0
  133. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/environment.py +0 -0
  134. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/core/validation/fix.py +0 -0
  135. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/__init__.py +0 -0
  136. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/bigquery.py +0 -0
  137. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/common.py +0 -0
  138. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/config.py +0 -0
  139. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/dataframe.py +0 -0
  140. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/duckdb.py +0 -0
  141. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/enums.py +0 -0
  142. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/metadata.py +0 -0
  143. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/postgres.py +0 -0
  144. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/presto.py +0 -0
  145. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/snowflake.py +0 -0
  146. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/dialect/sql_server.py +0 -0
  147. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/engine.py +0 -0
  148. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/executor.py +0 -0
  149. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/hooks/__init__.py +0 -0
  150. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/hooks/base_hook.py +0 -0
  151. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/hooks/graph_hook.py +0 -0
  152. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/hooks/query_debugger.py +0 -0
  153. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/metadata/__init__.py +0 -0
  154. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parser.py +0 -0
  155. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/__init__.py +0 -0
  156. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/config.py +0 -0
  157. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/exceptions.py +0 -0
  158. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/parsing/helpers.py +0 -0
  159. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/py.typed +0 -0
  160. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/render.py +0 -0
  161. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/scripts/__init__.py +0 -0
  162. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/scripts/trilogy.py +0 -0
  163. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/__init__.py +0 -0
  164. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/color.preql +0 -0
  165. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/date.preql +0 -0
  166. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/display.preql +0 -0
  167. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/geography.preql +0 -0
  168. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/metric.preql +0 -0
  169. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/money.preql +0 -0
  170. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/net.preql +0 -0
  171. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/ranking.preql +0 -0
  172. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/std/report.preql +0 -0
  173. {pytrilogy-0.0.3.113 → pytrilogy-0.0.3.115}/trilogy/utility.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.113
+Version: 0.0.3.115
 Summary: Declarative, typed query language that compiles to SQL.
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.113
+Version: 0.0.3.115
 Summary: Declarative, typed query language that compiles to SQL.
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
@@ -121,3 +121,17 @@ def test_alias_error():
     with raises(InvalidSyntaxException) as e:
         env.parse(TEXT2)
     assert ERROR_CODES[201] in str(e.value), e.value
+
+
+def test_semicolon_error():
+    env = Environment()
+    TEXT2 = """
+const a <- 1;
+
+select
+a+2 as fun,
+
+"""
+    with raises(InvalidSyntaxException) as e:
+        env.parse(TEXT2)
+    assert ERROR_CODES[202] in str(e.value), e.value
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
 from trilogy.executor import Executor
 from trilogy.parser import parse
 
-__version__ = "0.0.3.113"
+__version__ = "0.0.3.115"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
@@ -1,7 +1,9 @@
 import random
+from contextlib import contextmanager
 from dataclasses import dataclass, field
 from enum import Enum
 from logging import getLogger
+from typing import Any
 
 logger = getLogger("trilogy")
 
@@ -50,6 +52,32 @@ class Rendering:
     parameters: bool = True
     concise: bool = False
 
+    @contextmanager
+    def temporary(self, **kwargs: Any):
+        """
+        Context manager to temporarily set attributes and revert them afterwards.
+
+        Usage:
+            r = Rendering()
+            with r.temporary(parameters=False, concise=True):
+                # parameters is False, concise is True here
+                do_something()
+            # parameters and concise are back to their original values
+        """
+        # Store original values
+        original_values = {key: getattr(self, key) for key in kwargs}
+
+        # Set new values
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+        try:
+            yield self
+        finally:
+            # Restore original values
+            for key, value in original_values.items():
+                setattr(self, key, value)
+
 
 @dataclass
 class Parsing:
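A minimal usage sketch of the new Rendering.temporary context manager added above, assuming Rendering can be constructed with no arguments (the flag fields shown here default to parameters=True and concise=False). Overrides apply only inside the with block and are restored afterwards, even if the body raises:

    from trilogy.constants import Rendering

    rendering = Rendering()
    assert rendering.parameters is True and rendering.concise is False

    with rendering.temporary(parameters=False, concise=True):
        # overrides are active only inside the block
        assert rendering.parameters is False and rendering.concise is True

    # original values are restored on exit
    assert rendering.parameters is True and rendering.concise is False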
@@ -169,6 +169,7 @@ class FunctionType(Enum):
     ARRAY_SORT = "array_sort"
     ARRAY_TRANSFORM = "array_transform"
     ARRAY_TO_STRING = "array_to_string"
+    ARRAY_FILTER = "array_filter"
 
     # MAP
     MAP_KEYS = "map_keys"
@@ -204,6 +205,7 @@
     MIN = "min"
     AVG = "avg"
     ARRAY_AGG = "array_agg"
+    ANY = "any"
 
     # String
     LIKE = "like"
@@ -244,6 +246,7 @@
     DATE_ADD = "date_add"
     DATE_SUB = "date_sub"
     DATE_DIFF = "date_diff"
+    DATE_SPINE = "date_spine"
 
     # UNIX
     UNIX_TO_TIMESTAMP = "unix_to_timestamp"
@@ -263,6 +266,7 @@ class FunctionClass(Enum):
         FunctionType.ARRAY_AGG,
         FunctionType.COUNT,
         FunctionType.COUNT_DISTINCT,
+        FunctionType.ANY,
     ]
     SINGLE_ROW = [
         FunctionType.CONSTANT,
@@ -270,7 +274,7 @@
         FunctionType.CURRENT_DATETIME,
     ]
 
-    ONE_TO_MANY = [FunctionType.UNNEST]
+    ONE_TO_MANY = [FunctionType.UNNEST, FunctionType.DATE_SPINE]
 
     RECURSIVE = [FunctionType.RECURSE_EDGE]
 
@@ -212,6 +212,14 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
         output_type_function=get_unnest_output_type,
         arg_count=1,
     ),
+    FunctionType.DATE_SPINE: FunctionConfig(
+        valid_inputs={
+            DataType.DATE,
+        },
+        output_purpose=Purpose.KEY,
+        output_type=DataType.DATE,
+        arg_count=2,
+    ),
     FunctionType.RECURSE_EDGE: FunctionConfig(
         arg_count=2,
     ),
@@ -318,6 +326,18 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
         output_type_function=get_transform_output_type,
         arg_count=3,
     ),
+    FunctionType.ARRAY_FILTER: FunctionConfig(
+        valid_inputs=[
+            {
+                DataType.ARRAY,
+            },
+            {*DataType},
+            {*DataType},
+        ],
+        output_purpose=Purpose.PROPERTY,
+        output_type_function=get_transform_output_type,
+        arg_count=3,
+    ),
     FunctionType.ARRAY_TO_STRING: FunctionConfig(
         valid_inputs={
             DataType.ARRAY,
@@ -887,6 +907,11 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
         ),
         arg_count=1,
     ),
+    FunctionType.ANY: FunctionConfig(
+        valid_inputs={*DataType},
+        output_purpose=Purpose.PROPERTY,
+        arg_count=1,
+    ),
     FunctionType.AVG: FunctionConfig(
         valid_inputs={
             DataType.INTEGER,
@@ -1234,7 +1234,7 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
         elif (
             lineage
             and isinstance(lineage, (BuildFunction, Function))
-            and lineage.operator == FunctionType.UNNEST
+            and lineage.operator in FunctionClass.ONE_TO_MANY.value
         ):
             return Derivation.UNNEST
         elif (
@@ -1286,7 +1286,8 @@
         elif (
             lineage
             and isinstance(lineage, (Function, BuildFunction))
-            and lineage.operator in (FunctionType.UNNEST, FunctionType.UNION)
+            and lineage.operator
+            in (FunctionType.UNNEST, FunctionType.UNION, FunctionType.DATE_SPINE)
         ):
             return Granularity.MULTI_ROW
         elif lineage and all(
@@ -134,8 +134,9 @@ def concept_is_relevant(
     if concept.purpose in (Purpose.METRIC,):
         if all([c in others for c in concept.grain.components]):
             return False
+    if concept.derivation in (Derivation.UNNEST,):
+        return True
     if concept.derivation in (Derivation.BASIC,):
-
         return any(concept_is_relevant(c, others) for c in concept.concept_arguments)
     if concept.granularity == Granularity.SINGLE_ROW:
         return False
@@ -1668,7 +1669,6 @@ class Factory:
             valid_inputs=base.valid_inputs,
             arg_count=base.arg_count,
         )
-
         new = BuildFunction(
             operator=base.operator,
             arguments=[self.handle_constant(self.build(c)) for c in raw_args],
@@ -1724,6 +1724,14 @@
         return self._build_concept(base)
 
     def _build_concept(self, base: Concept) -> BuildConcept:
+        try:
+            return self.__build_concept(base)
+        except RecursionError as e:
+            raise RecursionError(
+                f"Recursion error building concept {base.address}. This is likely due to a circular reference."
+            ) from e
+
+    def __build_concept(self, base: Concept) -> BuildConcept:
         # TODO: if we are using parameters, wrap it in a new model and use that in rendering
         if base.address in self.local_concepts:
            return self.local_concepts[base.address]
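The wrapper added above converts a bare RecursionError into a message naming the concept that triggered it. A self-contained sketch of the same wrap-and-reraise pattern; the Node class and resolve helpers below are illustrative, not part of trilogy:

    class Node:
        def __init__(self, address: str, parent: "Node | None" = None):
            self.address = address
            self.parent = parent


    def resolve(node: Node) -> str:
        # public entry point: surface which node hit the recursion limit
        try:
            return _resolve(node)
        except RecursionError as e:
            raise RecursionError(
                f"Recursion error resolving {node.address}; likely a circular reference."
            ) from e


    def _resolve(node: Node) -> str:
        # naive recursive walk; a cycle in parent links exhausts the stack
        if node.parent is None:
            return node.address
        return f"{_resolve(node.parent)}.{node.address}"


    a = Node("a")
    b = Node("b", parent=a)
    a.parent = b  # introduce a cycle
    try:
        resolve(b)
    except RecursionError as exc:
        print(exc)  # Recursion error resolving b; likely a circular reference.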
@@ -2002,6 +2010,13 @@
     def _build_tuple_wrapper(self, base: TupleWrapper) -> TupleWrapper:
         return TupleWrapper(val=[self.build(x) for x in base.val], type=base.type)
 
+    @build.register
+    def _(self, base: ListWrapper) -> ListWrapper:
+        return self._build_list_wrapper(base)
+
+    def _build_list_wrapper(self, base: ListWrapper) -> ListWrapper:
+        return ListWrapper([self.build(x) for x in base], type=base.type)
+
     @build.register
     def _(self, base: FilterItem) -> BuildFilterItem:
         return self._build_filter_item(base)
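The @build.register entries above follow Python's functools.singledispatchmethod pattern, which the registration syntax implies Factory.build uses: each wrapper type is routed to its own builder. A standalone sketch of that dispatch style, with illustrative classes rather than trilogy's own:

    from functools import singledispatchmethod


    class TypedList(list):
        # illustrative stand-in for a wrapper type such as ListWrapper
        def __init__(self, values, type=None):
            super().__init__(values)
            self.type = type


    class Builder:
        @singledispatchmethod
        def build(self, base):
            raise NotImplementedError(f"no builder registered for {type(base)}")

        @build.register
        def _(self, base: TypedList) -> TypedList:
            # rebuild each element and carry the declared element type through
            return TypedList([self.build(x) for x in base], type=base.type)

        @build.register
        def _(self, base: int) -> int:
            return base


    print(Builder().build(TypedList([1, 2, 3], type="int")))  # [1, 2, 3]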
@@ -306,7 +306,12 @@ def evaluate_loop_conditions(
 
 
 def check_for_early_exit(
-    complete, partial, missing, context: LoopContext, priority_concept: BuildConcept
+    complete: ValidationResult,
+    found: set[str],
+    partial: set[str],
+    missing: set[str],
+    context: LoopContext,
+    priority_concept: BuildConcept,
 ) -> bool:
     if complete == ValidationResult.INCOMPLETE_CONDITION:
         cond_dict = {str(node): node.preexisting_conditions for node in context.stack}
@@ -331,8 +336,18 @@
                f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as we have attempted all nodes"
             )
             return True
+        elif all(
+            [
+                x.address in found and x.address not in partial
+                for x in context.mandatory_list
+            ]
+        ):
+            logger.info(
+                f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Breaking as we have found all mandatory nodes without partials"
+            )
+            return True
         logger.info(
-            f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found complete stack with partials {partial}, continuing search, attempted {context.attempted} all {len(context.mandatory_list)}"
+            f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found complete stack with partials {partial}, continuing search, attempted {context.attempted} of total {len(context.mandatory_list)}."
         )
     else:
         logger.info(
@@ -436,6 +451,7 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
             context.original_mandatory,
             context.environment,
             non_virtual_difference_values,
+            depth=context.depth,
         )
 
         return group_if_required_v2(
@@ -443,6 +459,7 @@
             context.original_mandatory,
             context.environment,
             non_virtual_difference_values,
+            depth=context.depth,
         )
 
 
@@ -466,6 +483,7 @@ def _search_concepts(
         conditions=conditions,
     )
 
+    # if we get a can
    if candidate:
        return candidate
    context = initialize_loop_context(
@@ -477,13 +495,14 @@
        accept_partial=accept_partial,
        conditions=conditions,
    )
-
+    partial: set[str] = set()
    while context.incomplete:
 
        priority_concept = get_priority_concept(
            context.mandatory_list,
            context.attempted,
            found_concepts=context.found,
+            partial_concepts=partial,
            depth=depth,
        )
 
@@ -538,7 +557,7 @@
        # assign
        context.found = found_c
        early_exit = check_for_early_exit(
-            complete, partial, missing_c, context, priority_concept
+            complete, found_c, partial, missing_c, context, priority_concept
        )
        if early_exit:
            break
@@ -608,4 +627,4 @@ def source_query_concepts(
    logger.info(
        f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
    )
-    return group_if_required_v2(root, output_concepts, environment)
+    return group_if_required_v2(root, output_concepts, environment, depth=0)
@@ -187,7 +187,7 @@ def _generate_aggregate_node(ctx: NodeGenerationContext) -> StrategyNode | None:
 
     logger.info(
         f"{depth_to_prefix(ctx.depth)}{LOGGER_PREFIX} "
-        f"for {ctx.concept.address}, generating aggregate node with {agg_optional}"
+        f"for {ctx.concept.address}, generating aggregate node with optional {agg_optional}"
     )
 
     return gen_group_node(
@@ -441,7 +441,7 @@ def generate_node(
     depth: int,
     source_concepts: SearchConceptsType,
     history: History,
-    accept_partial: bool = False,
+    accept_partial: bool,
     conditions: BuildWhereClause | None = None,
 ) -> StrategyNode | None:
 
@@ -184,10 +184,14 @@ def group_if_required_v2(
     final: List[BuildConcept],
     environment: BuildEnvironment,
     where_injected: set[str] | None = None,
+    depth: int = 0,
 ):
     where_injected = where_injected or set()
     required = check_if_group_required(
-        downstream_concepts=final, parents=[root.resolve()], environment=environment
+        downstream_concepts=final,
+        parents=[root.resolve()],
+        environment=environment,
+        depth=depth,
     )
     targets = [
         x
@@ -258,6 +262,7 @@ def get_priority_concept(
     all_concepts: List[BuildConcept],
     attempted_addresses: set[str],
     found_concepts: set[str],
+    partial_concepts: set[str],
     depth: int,
 ) -> BuildConcept:
     # optimized search for missing concepts
@@ -265,13 +270,15 @@
         [
             c
             for c in all_concepts
-            if c.address not in attempted_addresses and c.address not in found_concepts
+            if c.address not in attempted_addresses
+            and (c.address not in found_concepts or c.address in partial_concepts)
         ],
         key=lambda x: x.address,
     )
     # sometimes we need to scan intermediate concepts to get merge keys or filter keys,
     # so do an exhaustive search
-    # pass_two = [c for c in all_concepts+filter_only if c.address not in attempted_addresses]
+    # pass_two = [c for c in all_concepts if c.address not in attempted_addresses]
+
     for remaining_concept in (pass_one,):
         priority = (
             # then multiselects to remove them from scope
@@ -333,5 +340,5 @@
     if final:
         return final[0]
     raise ValueError(
-        f"Cannot resolve query. No remaining priority concepts, have attempted {attempted_addresses}"
+        f"Cannot resolve query. No remaining priority concepts, have attempted {attempted_addresses} out of {all_concepts} with found {found_concepts}"
     )
@@ -96,6 +96,8 @@ def build_parent_concepts(
             continue
         elif global_filter_is_local_filter:
             same_filter_optional.append(x)
+            # also append it to the parent row concepts
+            parent_row_concepts.append(x)
 
     # sometimes, it's okay to include other local optional above the filter
     # in case it is, prep our list
@@ -204,11 +206,16 @@ def gen_filter_node(
            f"{padding(depth)}{LOGGER_PREFIX} filter node row parents {[x.address for x in parent_row_concepts]} could not be found"
        )
        return None
+    else:
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} filter node has row parents {[x.address for x in parent_row_concepts]} from node with output [{[x.address for x in row_parent.output_concepts]}] partial {row_parent.partial_concepts}"
+        )
    if global_filter_is_local_filter:
        logger.info(
            f"{padding(depth)}{LOGGER_PREFIX} filter node conditions match global conditions adding row parent {row_parent.output_concepts} with condition {where.conditional}"
        )
        row_parent.add_parents(core_parent_nodes)
+        # all local optional will be in the parent already, so we can set outputs
        row_parent.set_output_concepts([concept] + local_optional)
        return row_parent
    if optimized_pushdown:
@@ -9,6 +9,7 @@ from trilogy.core.models.build import (
 from trilogy.core.models.build_environment import BuildEnvironment
 from trilogy.core.processing.nodes import (
     History,
+    MergeNode,
     StrategyNode,
     UnnestNode,
     WhereSafetyNode,
@@ -18,6 +19,32 @@ from trilogy.core.processing.utility import padding
 LOGGER_PREFIX = "[GEN_UNNEST_NODE]"
 
 
+def get_pseudonym_parents(
+    concept: BuildConcept,
+    local_optional: List[BuildConcept],
+    source_concepts,
+    environment: BuildEnvironment,
+    g,
+    depth,
+    history,
+    conditions,
+) -> List[StrategyNode]:
+    for x in concept.pseudonyms:
+        attempt = source_concepts(
+            mandatory_list=[environment.alias_origin_lookup[x]] + local_optional,
+            environment=environment,
+            g=g,
+            depth=depth + 1,
+            history=history,
+            conditions=conditions,
+            accept_partial=True,
+        )
+        if not attempt:
+            continue
+        return [attempt]
+    return []
+
+
 def gen_unnest_node(
     concept: BuildConcept,
     local_optional: List[BuildConcept],
@@ -29,14 +56,34 @@
     conditions: BuildWhereClause | None = None,
 ) -> StrategyNode | None:
     arguments = []
+    join_nodes: list[StrategyNode] = []
     depth_prefix = "\t" * depth
     if isinstance(concept.lineage, BuildFunction):
         arguments = concept.lineage.concept_arguments
+    search_optional = local_optional
+    if (not arguments) and (local_optional and concept.pseudonyms):
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} unnest node for {concept} has no parents; creating solo unnest node"
+        )
+        join_nodes += get_pseudonym_parents(
+            concept,
+            local_optional,
+            source_concepts,
+            environment,
+            g,
+            depth,
+            history,
+            conditions,
+        )
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} unnest node for {concept} got join nodes {join_nodes}"
+        )
+        search_optional = []
 
-    equivalent_optional = [x for x in local_optional if x.lineage == concept.lineage]
+    equivalent_optional = [x for x in search_optional if x.lineage == concept.lineage]
 
     non_equivalent_optional = [
-        x for x in local_optional if x not in equivalent_optional
+        x for x in search_optional if x not in equivalent_optional
     ]
     all_parents = arguments + non_equivalent_optional
     logger.info(
@@ -44,7 +91,8 @@
     )
     local_conditions = False
     expected_outputs = [concept] + local_optional
-    if arguments or local_optional:
+    parent: StrategyNode | None = None
+    if arguments or search_optional:
         parent = source_concepts(
             mandatory_list=all_parents,
             environment=environment,
@@ -86,14 +134,37 @@
     base = UnnestNode(
         unnest_concepts=[concept] + equivalent_optional,
         input_concepts=arguments + non_equivalent_optional,
-        output_concepts=[concept] + local_optional,
+        output_concepts=[concept] + search_optional,
         environment=environment,
         parents=([parent] if parent else []),
     )
+
+    conditional = conditions.conditional if conditions else None
+    if join_nodes:
+        logger.info(
+            f"{depth_prefix}{LOGGER_PREFIX} unnest node for {concept} needs to merge with join nodes {join_nodes}"
+        )
+        for x in join_nodes:
+            logger.info(
+                f"{depth_prefix}{LOGGER_PREFIX} join node {x} with partial {x.partial_concepts}"
+            )
+            pseudonyms = [
+                environment.alias_origin_lookup[p] for p in concept.pseudonyms
+            ]
+            x.add_partial_concepts(pseudonyms)
+        return MergeNode(
+            input_concepts=base.output_concepts
+            + [j for n in join_nodes for j in n.output_concepts],
+            output_concepts=[concept] + local_optional,
+            environment=environment,
+            parents=[base] + join_nodes,
+            conditions=conditional if local_conditions is True else None,
+            preexisting_conditions=(
+                conditional if conditional and local_conditions is False else None
+            ),
+        )
     # we need to sometimes nest an unnest node,
     # as unnest operations are not valid in all situations
-    # TODO: inline this node when we can detect it's safe
-    conditional = conditions.conditional if conditions else None
     new = WhereSafetyNode(
         input_concepts=base.output_concepts,
         output_concepts=base.output_concepts,
@@ -197,7 +197,7 @@ class SelectStatement(HasUUID, SelectTypeMixin, BaseModel):
             for x in self.where_clause.concept_arguments:
                 if isinstance(x, UndefinedConcept):
                     validate = environment.concepts.get(x.address)
-                    if validate:
+                    if validate and self.where_clause:
                         self.where_clause = (
                             self.where_clause.with_reference_replacement(
                                 x.address, validate.reference
@@ -194,6 +194,13 @@ FUNCTION_MAP = {
     FunctionType.INDEX_ACCESS: lambda x: f"{x[0]}[{x[1]}]",
     FunctionType.MAP_ACCESS: lambda x: f"{x[0]}[{x[1]}]",
     FunctionType.UNNEST: lambda x: f"unnest({x[0]})",
+    FunctionType.DATE_SPINE: lambda x: f"""unnest(
+    generate_series(
+        {x[0]},
+        {x[1]},
+        INTERVAL '1 day'
+    )
+)""",
     FunctionType.RECURSE_EDGE: lambda x: f"CASE WHEN {x[1]} IS NULL THEN {x[0]} ELSE {x[1]} END",
     FunctionType.ATTR_ACCESS: lambda x: f"""{x[0]}.{x[1].replace("'", "")}""",
     FunctionType.STRUCT: lambda x: f"{{{', '.join(struct_arg(x))}}}",
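A quick illustration of the new DATE_SPINE renderer above: given two already-rendered date expressions, the lambda emits an unnest over generate_series with a one-day interval. The date literals below are illustrative:

    from trilogy.core.enums import FunctionType
    from trilogy.dialect.base import FUNCTION_MAP

    sql = FUNCTION_MAP[FunctionType.DATE_SPINE](
        ["DATE '2024-01-01'", "DATE '2024-01-31'"]
    )
    print(sql)
    # unnest(generate_series(DATE '2024-01-01', DATE '2024-01-31', INTERVAL '1 day'))
    # (with the line breaks from the multi-line template above)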
@@ -213,6 +220,9 @@
     FunctionType.ARRAY_TO_STRING: lambda args: (
         f"array_to_string({args[0]}, {args[1]})"
     ),
+    FunctionType.ARRAY_FILTER: lambda args: (
+        f"array_filter({args[0]}, {args[1]} -> {args[2]})"
+    ),
     # math
     FunctionType.ADD: lambda x: " + ".join(x),
     FunctionType.ABS: lambda x: f"abs({x[0]})",
@@ -237,6 +247,7 @@
     FunctionType.AVG: lambda x: f"avg({x[0]})",
     FunctionType.MAX: lambda x: f"max({x[0]})",
     FunctionType.MIN: lambda x: f"min({x[0]})",
+    FunctionType.ANY: lambda x: f"any_value({x[0]})",
     # string types
     FunctionType.LIKE: lambda x: f" {x[0]} like {x[1]} ",
     FunctionType.UPPER: lambda x: f"UPPER({x[0]}) ",
@@ -285,6 +296,7 @@ FUNCTION_GRAIN_MATCH_MAP = {
     FunctionType.AVG: lambda args: f"{args[0]}",
     FunctionType.MAX: lambda args: f"{args[0]}",
     FunctionType.MIN: lambda args: f"{args[0]}",
+    FunctionType.ANY: lambda args: f"{args[0]}",
 }
 
 
@@ -91,7 +91,7 @@ def process_function_arg(
     # to simplify anonymous function handling
     if (
         arg.operator not in FunctionClass.AGGREGATE_FUNCTIONS.value
-        and arg.operator != FunctionType.UNNEST
+        and arg.operator not in FunctionClass.ONE_TO_MANY.value
     ):
         return arg
     id_hash = string_to_hash(str(arg))
@@ -311,13 +311,18 @@ def concept_is_relevant(
     if concept.purpose in (Purpose.METRIC,):
         if all([c in others for c in concept.grain.components]):
             return False
+    if (
+        concept.derivation in (Derivation.BASIC,)
+        and isinstance(concept.lineage, Function)
+        and concept.lineage.operator == FunctionType.DATE_SPINE
+    ):
+        return True
     if concept.derivation in (Derivation.BASIC,) and isinstance(
         concept.lineage, (Function, CaseWhen)
     ):
         relevant = False
         for arg in concept.lineage.arguments:
             relevant = atom_is_relevant(arg, others, environment) or relevant
-
         return relevant
     if concept.derivation in (Derivation.BASIC,) and isinstance(
         concept.lineage, Parenthetical
@@ -529,7 +534,7 @@ def function_to_concept(
     elif parent.operator == FunctionType.UNION:
         derivation = Derivation.UNION
         granularity = Granularity.MULTI_ROW
-    elif parent.operator == FunctionType.UNNEST:
+    elif parent.operator in FunctionClass.ONE_TO_MANY.value:
         derivation = Derivation.UNNEST
         granularity = Granularity.MULTI_ROW
     elif parent.operator == FunctionType.RECURSE_EDGE: