pytrilogy 0.0.3.36__tar.gz → 0.0.3.38__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pytrilogy might be problematic.
- {pytrilogy-0.0.3.36/pytrilogy.egg-info → pytrilogy-0.0.3.38}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_functions.py +19 -1
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_metadata.py +0 -3
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_typing.py +21 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/enums.py +1 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/functions.py +7 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/author.py +8 -2
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/build.py +66 -5
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/concept_strategies_v3.py +23 -13
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/filter_node.py +3 -2
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/group_to_node.py +3 -0
- pytrilogy-0.0.3.38/trilogy/core/processing/node_generators/union_node.py +91 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/__init__.py +19 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/merge_node.py +3 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/utility.py +30 -6
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/query_processor.py +3 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/author.py +2 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/base.py +10 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/bigquery.py +17 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/common.py +79 -2
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/parse_engine.py +11 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/trilogy.lark +15 -8
- pytrilogy-0.0.3.36/trilogy/core/processing/node_generators/union_node.py +0 -75
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/LICENSE.md +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/README.md +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pyproject.toml +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/setup.cfg +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/setup.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_models.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_query_render.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_select.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_show.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_user_functions.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/authoring/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/constants.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/build_environment.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/core.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/datasource.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/environment.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/execute.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/base_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/group_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/union_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/build.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/common.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/execute.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/dataframe.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/render.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/__init__.py +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/date.preql +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/display.preql +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/geography.preql +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/money.preql +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/std/report.preql +0 -0
- {pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/utility.py +0 -0
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_functions.py
RENAMED
@@ -212,6 +212,7 @@ def test_math_functions(test_environment):
     property order_add <- revenue + 2;
     property order_id.order_nested <- revenue * 2/2;
     property order_id.rounded <- round(revenue + 2.01,2);
+    constant random <- random(1);
     select
         order_id,
         inflated_order_value,
@@ -219,7 +220,8 @@ def test_math_functions(test_environment):
         fixed_order_value,
         order_sub,
         order_add,
-        rounded
+        rounded,
+        random,
     ;"""
     env, parsed = parse(declarations, environment=test_environment)
     select: SelectStatement = parsed[-1]
@@ -227,6 +229,22 @@ def test_math_functions(test_environment):
     dialect.compile_statement(process_query(test_environment, select))


+def test_random_randomness(test_environment):
+    declarations = """
+    auto x <- unnest([1,2,3,4]);
+    select
+        x,
+        random(x) -> random_1,
+    ;"""
+    env, parsed = parse(declarations, environment=test_environment)
+    z = (
+        Dialects.DUCK_DB.default_executor(environment=test_environment)
+        .execute_query(parsed[-1])
+        .fetchall()
+    )
+    assert z[0].random_1 != z[1].random_1, f"{z[0].random_1} == {z[1].random_1}"
+
+
 def test_string_functions(test_environment):
     declarations = """
     property test_name <- concat(category_name, '_test');
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/tests/test_typing.py
RENAMED
@@ -214,3 +214,24 @@ sum(
     )

     assert env.environment.concepts["total"].datatype.traits == ["money"]
+
+
+def test_custom_trait_unnest_typing():
+    env = Dialects.DUCK_DB.default_executor()
+    env.environment.parse(
+        """
+import std.geography;
+
+const array <- ['VT', 'MA', 'NY', 'CA']::array<string::us_state_short>;
+
+"""
+    )
+
+    _ = env.execute_query(
+        """
+SELECT
+    unnest(array)->state;
+"""
+    )
+
+    assert env.environment.concepts["state"].datatype.traits == ["us_state_short"]
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/functions.py
RENAMED
@@ -191,6 +191,7 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
     ),
     FunctionType.GROUP: FunctionConfig(
         arg_count=-1,
+        output_type_function=lambda args: get_output_type_at_index(args, 0),
     ),
     FunctionType.COUNT: FunctionConfig(
         output_purpose=Purpose.METRIC,
@@ -620,6 +621,12 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
         output_type=DataType.INTEGER,
         arg_count=1,
     ),
+    FunctionType.RANDOM: FunctionConfig(
+        valid_inputs=[],
+        output_purpose=Purpose.PROPERTY,
+        output_type=DataType.FLOAT,
+        arg_count=1,
+    ),
     FunctionType.ROUND: FunctionConfig(
         valid_inputs=[
             {DataType.INTEGER, DataType.FLOAT, DataType.NUMBER, DataType.NUMERIC},
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/author.py
RENAMED
@@ -180,8 +180,14 @@ class UndefinedConcept(ConceptRef):

 def address_with_namespace(address: str, namespace: str) -> str:
     existing_ns = address.split(".", 1)[0]
+    if "." in address:
+        existing_name = address.split(".", 1)[1]
+    else:
+        existing_name = address
+    if existing_name == ALL_ROWS_CONCEPT:
+        return address
     if existing_ns == DEFAULT_NAMESPACE:
-        return f"{namespace}.{
+        return f"{namespace}.{existing_name}"
     return f"{namespace}.{address}"


@@ -1129,7 +1135,7 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
         return self.lineage.concept_arguments if self.lineage else []

     @classmethod
-    def calculate_derivation(self, lineage, purpose):
+    def calculate_derivation(self, lineage, purpose: Purpose) -> Derivation:
         from trilogy.core.models.build import (
             BuildAggregateWrapper,
             BuildFilterItem,
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/models/build.py
RENAMED
@@ -65,6 +65,7 @@ from trilogy.core.models.author import (
     RowsetLineage,
     SelectLineage,
     SubselectComparison,
+    UndefinedConcept,
     WhereClause,
     WindowItem,
 )
@@ -800,9 +801,6 @@ class BuildConcept(Addressable, BuildConceptArgs, DataTyped, BaseModel):
     modifiers: List[Modifier] = Field(default_factory=list) # type: ignore
     pseudonyms: set[str] = Field(default_factory=set)

-    def with_select_context(self, *args, **kwargs):
-        return self
-
     @property
     def is_aggregate(self) -> bool:
         return self.build_is_aggregate
@@ -1503,7 +1501,7 @@ class Factory:
         return base

     @build.register
-    def _(self, base: Function) -> BuildFunction:
+    def _(self, base: Function) -> BuildFunction | BuildAggregateWrapper:
         from trilogy.parsing.common import arbitrary_to_concept

         raw_args: list[Concept | FuncArgs] = []
@@ -1517,6 +1515,54 @@ class Factory:
                 raw_args.append(narg)
             else:
                 raw_args.append(arg)
+        if base.operator == FunctionType.GROUP:
+            group_base = raw_args[0]
+            final_args: List[Concept | ConceptRef] = []
+            if isinstance(group_base, ConceptRef):
+                if group_base.address in self.environment.concepts and not isinstance(
+                    self.environment.concepts[group_base.address], UndefinedConcept
+                ):
+                    group_base = self.environment.concepts[group_base.address]
+            if (
+                isinstance(group_base, Concept)
+                and isinstance(group_base.lineage, AggregateWrapper)
+                and not group_base.lineage.by
+            ):
+                arguments = raw_args[1:]
+                for x in arguments:
+                    if isinstance(x, (ConceptRef, Concept)):
+                        final_args.append(x)
+                    elif isinstance(x, (AggregateWrapper, FilterItem, WindowItem)):
+                        newx = arbitrary_to_concept(
+                            x,
+                            environment=self.environment,
+                        )
+                        final_args.append(newx)
+                    else:
+                        # constants, etc, can be ignored for group
+                        continue
+                group_base = group_base.model_copy(
+                    deep=True,
+                    update={
+                        "lineage": AggregateWrapper(
+                            function=group_base.lineage.function,
+                            by=final_args,
+                        )
+                    },
+                )
+                group_base = group_base.with_grain(
+                    Grain.from_concepts(final_args, environment=self.environment)
+                )
+            rval = self.build(group_base)
+            return BuildFunction.model_construct(
+                operator=base.operator,
+                arguments=[rval, *[self.build(c) for c in raw_args[1:]]],
+                output_datatype=base.output_datatype,
+                output_purpose=base.output_purpose,
+                valid_inputs=base.valid_inputs,
+                arg_count=base.arg_count,
+            )
+
         new = BuildFunction.model_construct(
             operator=base.operator,
             arguments=[self.build(c) for c in raw_args],
@@ -1568,7 +1614,10 @@ class Factory:
         new_lineage, final_grain, _ = base.get_select_grain_and_keys(
             self.grain, self.environment
         )
-
+        if new_lineage:
+            build_lineage = self.build(new_lineage)
+        else:
+            build_lineage = None
         derivation = Concept.calculate_derivation(build_lineage, base.purpose)
         granularity = Concept.calculate_granularity(
             derivation, final_grain, build_lineage
@@ -1929,6 +1978,18 @@ class Factory:
     def _(self, base: TraitDataType):
         return base

+    @build.register
+    def _(self, base: ListType):
+        return base
+
+    @build.register
+    def _(self, base: StructType):
+        return base
+
+    @build.register
+    def _(self, base: MapType):
+        return base
+
     @build.register
     def _(self, base: Datasource):
         local_cache: dict[str, BuildConcept] = {}
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/concept_strategies_v3.py
RENAMED
@@ -110,11 +110,14 @@ def get_priority_concept(
     depth: int,
 ) -> BuildConcept:
     # optimized search for missing concepts
-    pass_one =
-
-
-
-
+    pass_one = sorted(
+        [
+            c
+            for c in all_concepts
+            if c.address not in attempted_addresses and c.address not in found_concepts
+        ],
+        key=lambda x: x.address,
+    )
     # sometimes we need to scan intermediate concepts to get merge keys or filter keys,
     # so do an exhaustive search
     # pass_two = [c for c in all_concepts+filter_only if c.address not in attempted_addresses]
@@ -388,6 +391,9 @@ def generate_node(
            # conditions=conditions,
         )
     else:
+        logger.info(
+            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} skipping search, already in a recursion fot these concepts"
+        )
         return None
     return ConstantNode(
         input_concepts=[],
@@ -453,9 +459,10 @@ def generate_node(
            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} including any filters, there are non-root concepts we should expand first: {non_root}. Recursing with all of these as mandatory"
        )

-        if not history.check_started(
-
-        ):
+        # if not history.check_started(
+        #     root_targets, accept_partial=accept_partial, conditions=conditions
+        # ) or 1==1:
+        if True:
            history.log_start(
                root_targets, accept_partial=accept_partial, conditions=conditions
            )
@@ -470,7 +477,10 @@ def generate_node(
                # which we do whenever we hit a root node
                # conditions=conditions,
            )
-
+        else:
+            logger.info(
+                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} skipping root search, already in a recursion for these concepts"
+            )
        check = history.gen_select_node(
            concept,
            local_optional,
@@ -546,7 +556,10 @@ def generate_node(
                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} resolved concepts through synonyms"
            )
            return resolved
-
+        else:
+            logger.info(
+                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} skipping synonym search, already in a recursion for these concepts"
+            )
        return None
    else:
        raise ValueError(f"Unknown derivation {concept.derivation} on {concept}")
@@ -1041,7 +1054,6 @@ def _search_concepts(
         environment=environment,
         depth=depth,
     )
-    logger.info(f"gcheck result is {result}")
     if result.required:
         logger.info(
             f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Adding group node"
@@ -1104,7 +1116,6 @@ def source_query_concepts(
         history=history,
         conditions=conditions,
     )
-
     if not root:
         error_strings = [
             f"{c.address}<{c.purpose}>{c.derivation}>" for c in output_concepts
@@ -1130,5 +1141,4 @@ def source_query_concepts(
         )
     else:
         candidate = root
-
     return candidate
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/filter_node.py
RENAMED
@@ -48,9 +48,10 @@ def gen_filter_node(
         if isinstance(x.lineage, FILTER_TYPES):
             if concept.lineage.where == where:
                 logger.info(
-                    f"{padding(depth)}{LOGGER_PREFIX} fetching {x.lineage.content.address} as optional parent with same filter conditions "
+                    f"{padding(depth)}{LOGGER_PREFIX} fetching {x.lineage.content.address} as optional parent from optional {x} with same filter conditions "
                 )
-
+                if x.lineage.content.address not in parent_row_concepts:
+                    parent_row_concepts.append(x.lineage.content)
                 optional_included.append(x)
                 continue
         if conditions and conditions == where:
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/node_generators/group_to_node.py
RENAMED
@@ -3,6 +3,8 @@ from typing import List
 from trilogy.constants import logger
 from trilogy.core.models.build import BuildConcept, BuildFunction, BuildWhereClause
 from trilogy.core.models.build_environment import BuildEnvironment
+
+# C:\Users\ethan\coding_projects\pytrilogy\trilogy\core\processing\node_generators\group_to_node.py
 from trilogy.core.processing.nodes import (
     GroupNode,
     History,
@@ -30,6 +32,7 @@ def gen_group_to_node(
             f"Group to should have function lineage, is {type(concept.lineage)}"
         )
     group_arg = concept.lineage.arguments[0]
+
     parent_concepts: List[BuildConcept] = concept.lineage.concept_arguments
     logger.info(
         f"{padding(depth)}{LOGGER_PREFIX} group by node has required parents {[x.address for x in parent_concepts]}"
pytrilogy-0.0.3.38/trilogy/core/processing/node_generators/union_node.py
ADDED
@@ -0,0 +1,91 @@
+from typing import List
+
+from trilogy.constants import logger
+from trilogy.core.enums import FunctionType
+from trilogy.core.models.build import BuildConcept, BuildFunction, BuildWhereClause
+from trilogy.core.processing.nodes import History, StrategyNode, UnionNode
+from trilogy.core.processing.utility import padding
+
+LOGGER_PREFIX = "[GEN_UNION_NODE]"
+
+
+def is_union(c: BuildConcept):
+    return (
+        isinstance(c.lineage, BuildFunction)
+        and c.lineage.operator == FunctionType.UNION
+    )
+
+
+def build_layers(
+    concepts: list[BuildConcept],
+) -> tuple[list[list[BuildConcept]], list[BuildConcept]]:
+    sources = {
+        x.address: x.lineage.concept_arguments if x.lineage else [] for x in concepts
+    }
+    root = concepts[0]
+
+    built_layers = []
+    layers = root.lineage.concept_arguments if root.lineage else []
+    sourced = set()
+    while layers:
+        layer = []
+        current = layers.pop()
+        sourced.add(current.address)
+        layer.append(current)
+        for key, values in sources.items():
+            if key == current.address:
+                continue
+            for value in values:
+                if value.address in (current.keys or []) or current.address in (
+                    value.keys or []
+                ):
+                    layer.append(value)
+                    sourced.add(value.address)
+        built_layers.append(layer)
+    complete = [
+        x for x in concepts if all([x.address in sourced for x in sources[x.address]])
+    ]
+    return built_layers, complete
+
+
+def gen_union_node(
+    concept: BuildConcept,
+    local_optional: List[BuildConcept],
+    environment,
+    g,
+    depth: int,
+    source_concepts,
+    history: History | None = None,
+    conditions: BuildWhereClause | None = None,
+) -> StrategyNode | None:
+    all_unions = [x for x in local_optional if is_union(x)] + [concept]
+    logger.info(f"{padding(depth)}{LOGGER_PREFIX} found unions {all_unions}")
+    parent_nodes = []
+    layers, resolved = build_layers(all_unions)
+    for layer in layers:
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} fetching layer {layer} with resolved {resolved}"
+        )
+        parent: StrategyNode = source_concepts(
+            mandatory_list=layer,
+            environment=environment,
+            g=g,
+            depth=depth + 1,
+            history=history,
+            conditions=conditions,
+        )
+        parent.hide_output_concepts(parent.output_concepts)
+        parent.add_output_concepts(resolved)
+        parent_nodes.append(parent)
+        if not parent:
+            logger.info(
+                f"{padding(depth)}{LOGGER_PREFIX} could not find union node parents"
+            )
+            return None
+
+    return UnionNode(
+        input_concepts=resolved,
+        output_concepts=resolved,
+        environment=environment,
+        parents=parent_nodes,
+    )
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/__init__.py
RENAMED
@@ -45,6 +45,11 @@ class History(BaseModel):
         self.history[
             self._concepts_to_lookup(search, accept_partial, conditions=conditions)
         ] = output
+        self.log_end(
+            search,
+            accept_partial=accept_partial,
+            conditions=conditions,
+        )

     def get_history(
         self,
@@ -83,6 +88,20 @@ class History(BaseModel):
             )
         )

+    def log_end(
+        self,
+        search: list[BuildConcept],
+        accept_partial: bool = False,
+        conditions: BuildWhereClause | None = None,
+    ):
+        self.started.discard(
+            self._concepts_to_lookup(
+                search,
+                accept_partial=accept_partial,
+                conditions=conditions,
+            )
+        )
+
     def check_started(
         self,
         search: list[BuildConcept],
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/nodes/merge_node.py
RENAMED
@@ -272,6 +272,8 @@ class MergeNode(StrategyNode):
                 f"{self.logging_prefix}{LOGGER_PREFIX} Merge node has only one parent with the same"
                 " outputs as this merge node, dropping merge node "
             )
+            # push up any conditions we need
+            final.ordering = self.ordering
             return final

         # if we have multiple candidates, see if one is good enough
@@ -293,6 +295,7 @@ class MergeNode(StrategyNode):
                 f" has all required output properties with partial {[c.address for c in dataset.partial_concepts]}"
                 f" and self has no conditions ({self.conditions})"
             )
+            dataset.ordering = self.ordering
             return dataset

         pregrain = BuildGrain()
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/processing/utility.py
RENAMED
@@ -603,15 +603,29 @@ def find_nullable_concepts(
 def sort_select_output_processed(
     cte: CTE | UnionCTE, query: ProcessedQuery
 ) -> CTE | UnionCTE:
-    output_addresses = [
-        c.address for c in query.output_columns if c.address not in query.hidden_columns
-    ]
+    output_addresses = [c.address for c in query.output_columns]

     mapping = {x.address: x for x in cte.output_columns}

-    new_output = []
+    new_output: list[BuildConcept] = []
     for x in output_addresses:
         new_output.append(mapping[x])
+
+    for oc in cte.output_columns:
+        # add hidden back
+        if oc.address not in output_addresses:
+            new_output.append(oc)
+
+    cte.hidden_concepts = set(
+        [
+            c.address
+            for c in cte.output_columns
+            if (
+                c.address not in query.output_columns
+                or c.address in query.hidden_columns
+            )
+        ]
+    )
     cte.output_columns = new_output
     return cte

@@ -619,18 +633,28 @@ def sort_select_output_processed(
 def sort_select_output(
     cte: CTE | UnionCTE, query: SelectStatement | MultiSelectStatement | ProcessedQuery
 ) -> CTE | UnionCTE:
+
     if isinstance(query, ProcessedQuery):
         return sort_select_output_processed(cte, query)
+
     output_addresses = [
         c.address
         for c in query.output_components
-        if c.address not in query.hidden_components
+        # if c.address not in query.hidden_components
     ]

     mapping = {x.address: x for x in cte.output_columns}

-    new_output = []
+    new_output: list[BuildConcept] = []
     for x in output_addresses:
         new_output.append(mapping[x])
     cte.output_columns = new_output
+    cte.hidden_concepts = set(
+        [
+            c.address
+            for c in query.output_components
+            if c.address in query.hidden_components
+        ]
+    )
+
     return cte
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/query_processor.py
RENAMED
@@ -431,7 +431,9 @@ def get_query_datasources(
     hooks: Optional[List[BaseHook]] = None,
 ) -> QueryDatasource:
     ds = get_query_node(environment, statement.as_lineage(environment))
+
     final_qds = ds.resolve()
+
     if hooks:
         for hook in hooks:
             hook.process_root_strategy_node(ds)
@@ -510,6 +512,7 @@ def process_query(
             hook.process_root_datasource(root_datasource)
     # this should always return 1 - TODO, refactor
     root_cte = datasource_to_cte(root_datasource, environment.cte_name_map)
+
     for hook in hooks:
         hook.process_root_cte(root_cte)
     raw_ctes: List[CTE | UnionCTE] = list(reversed(flatten_ctes(root_cte)))
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/core/statements/author.py
RENAMED
@@ -135,6 +135,7 @@ class SelectStatement(HasUUID, SelectTypeMixin, BaseModel):
         )

         output.grain = output.calculate_grain(environment)
+
         for x in selection:
             if x.is_undefined and environment.concepts.fail_on_missing:
                 environment.concepts.raise_undefined(
@@ -166,6 +167,7 @@ class SelectStatement(HasUUID, SelectTypeMixin, BaseModel):
         targets = []
         for x in self.selection:
             targets.append(x.concept)
+
         result = Grain.from_concepts(
             targets, where_clause=self.where_clause, environment=environment
         )
{pytrilogy-0.0.3.36 → pytrilogy-0.0.3.38}/trilogy/dialect/base.py
RENAMED
@@ -132,6 +132,11 @@ DATATYPE_MAP: dict[DataType, str] = {
     DataType.MAP: "map",
     DataType.DATE: "date",
     DataType.DATETIME: "datetime",
+    DataType.LIST: "list",
+}
+
+COMPLEX_DATATYPE_MAP = {
+    DataType.LIST: lambda x: f"{x}[]",
 }


@@ -166,12 +171,14 @@ FUNCTION_MAP = {
     FunctionType.DATETIME_LITERAL: lambda x: f"datetime '{x}'",
     # math
     FunctionType.ADD: lambda x: " + ".join(x),
+    FunctionType.ABS: lambda x: f"abs({x[0]})",
     FunctionType.SUBTRACT: lambda x: " - ".join(x),
     FunctionType.DIVIDE: lambda x: " / ".join(x),
     FunctionType.MULTIPLY: lambda x: " * ".join(x),
     FunctionType.ROUND: lambda x: f"round({x[0]},{x[1]})",
     FunctionType.MOD: lambda x: f"({x[0]} % {x[1]})",
     FunctionType.SQRT: lambda x: f"sqrt({x[0]})",
+    FunctionType.RANDOM: lambda x: "random()",
     # aggregate types
     FunctionType.COUNT_DISTINCT: lambda x: f"count(distinct {x[0]})",
     FunctionType.COUNT: lambda x: f"count({x[0]})",
@@ -283,6 +290,7 @@ class BaseDialect:
     QUOTE_CHARACTER = "`"
     SQL_TEMPLATE = GENERIC_SQL_TEMPLATE
     DATATYPE_MAP = DATATYPE_MAP
+    COMPLEX_DATATYPE_MAP = COMPLEX_DATATYPE_MAP
     UNNEST_MODE = UnnestMode.CROSS_APPLY

     def __init__(self, rendering: Rendering | None = None):
@@ -682,6 +690,8 @@ class BaseDialect:
             return self.FUNCTION_MAP[FunctionType.DATETIME_LITERAL](e)
         elif isinstance(e, TraitDataType):
             return self.render_expr(e.type, cte=cte, cte_map=cte_map)
+        elif isinstance(e, ListType):
+            return f"{self.COMPLEX_DATATYPE_MAP[DataType.LIST](self.render_expr(e.value_data_type, cte=cte, cte_map=cte_map))}"
         else:
             raise ValueError(f"Unable to render type {type(e)} {e}")
