pytrilogy 0.0.3.64__tar.gz → 0.0.3.66__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pytrilogy might be problematic.
- {pytrilogy-0.0.3.64/pytrilogy.egg-info → pytrilogy-0.0.3.66}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_parsing.py +44 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/graph_models.py +45 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/author.py +17 -4
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/build.py +4 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/environment.py +15 -11
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/execute.py +30 -58
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/node_merge_node.py +4 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +3 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_merge_node.py +40 -52
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/query_processor.py +3 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/base.py +5 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/hooks/graph_hook.py +0 -1
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/common.py +28 -3
- pytrilogy-0.0.3.66/trilogy/std/display.preql +6 -0
- pytrilogy-0.0.3.64/trilogy/std/display.preql +0 -3
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/LICENSE.md +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/README.md +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pyproject.toml +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/setup.cfg +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/setup.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_failure.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_metadata.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_models.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_parsing_failures.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_query_render.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_select.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_show.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_typing.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_user_functions.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/authoring/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/constants.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/enums.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/functions.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/build_environment.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/core.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/datasource.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/concept_strategies_v3.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/discovery_loop.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/discovery_node_factory.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/discovery_utility.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/discovery_validation.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/filter_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/union_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/base_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/group_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/merge_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/recursive_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/union_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/utility.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/statements/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/statements/author.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/statements/build.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/statements/common.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/statements/execute.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/utility.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/dataframe.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/parse_engine.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/render.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/__init__.py +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/date.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/geography.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/money.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/net.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/ranking.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/std/report.preql +0 -0
- {pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/utility.py +0 -0
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/tests/test_parsing.py
RENAMED
@@ -2,7 +2,7 @@ from trilogy import Dialects
 from trilogy.constants import MagicConstants
 from trilogy.core.enums import BooleanOperator, ComparisonOperator, Purpose
 from trilogy.core.functions import argument_to_purpose, function_args_to_output_purpose
-from trilogy.core.models.author import Comparison
+from trilogy.core.models.author import Comparison, Conditional, SubselectComparison
 from trilogy.core.models.core import (
     DataType,
     TupleWrapper,
@@ -16,6 +16,7 @@ from trilogy.core.models.environment import (
 from trilogy.core.statements.author import SelectStatement, ShowStatement
 from trilogy.core.statements.execute import ProcessedQuery
 from trilogy.dialect.base import BaseDialect
+from trilogy.parsing.common import atom_is_relevant
 from trilogy.parsing.parse_engine import (
     arg_to_datatype,
     parse_text,
@@ -685,3 +686,45 @@ select x % 10 -> x_mod_10;
 
 """
     )
+
+
+def test_is_atom():
+    env = Environment()
+
+    env.parse(
+        """
+key x int;
+auto x_sum <- sum(x);
+"""
+    )
+
+    assert not atom_is_relevant(
+        Comparison(left=env.concepts["x_sum"], right=0, operator=ComparisonOperator.GT),
+        [env.concepts["x"]],
+        env,
+    )
+    assert atom_is_relevant(
+        Comparison(left=env.concepts["x"], right=0, operator=ComparisonOperator.GT),
+        [env.concepts["x_sum"]],
+        env,
+    )
+
+    assert not atom_is_relevant(
+        Conditional(left=env.concepts["x_sum"], right=0, operator=BooleanOperator.AND),
+        [env.concepts["x"]],
+        env,
+    )
+    assert atom_is_relevant(
+        Conditional(left=env.concepts["x"], right=0, operator=BooleanOperator.AND),
+        [env.concepts["x_sum"]],
+        env,
+    )
+    assert not atom_is_relevant(
+        SubselectComparison(
+            left=env.concepts["x_sum"],
+            right=env.concepts["x"],
+            operator=ComparisonOperator.IN,
+        ),
+        [env.concepts["x_sum"]],
+        env,
+    )
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/graph_models.py
RENAMED
@@ -1,6 +1,50 @@
 import networkx as nx
 
-from trilogy.core.models.build import BuildConcept, BuildDatasource
+from trilogy.core.models.build import BuildConcept, BuildDatasource, BuildWhereClause
+
+
+def get_graph_exact_match(
+    g: nx.DiGraph, conditions: BuildWhereClause | None
+) -> set[str]:
+    datasources: dict[str, BuildDatasource | list[BuildDatasource]] = (
+        nx.get_node_attributes(g, "datasource")
+    )
+    exact: set[str] = set()
+    for node in g.nodes:
+        if node in datasources:
+            ds = datasources[node]
+            if isinstance(ds, list):
+                exact.add(node)
+                continue
+
+            if not conditions and not ds.non_partial_for:
+                exact.add(node)
+                continue
+            elif conditions:
+                if not ds.non_partial_for:
+                    continue
+                if ds.non_partial_for and conditions == ds.non_partial_for:
+                    exact.add(node)
+                    continue
+            else:
+                continue
+
+    return exact
+
+
+def prune_sources_for_conditions(
+    g: nx.DiGraph,
+    conditions: BuildWhereClause | None,
+):
+
+    complete = get_graph_exact_match(g, conditions)
+    to_remove = []
+    for node in g.nodes:
+        if node.startswith("ds~") and node not in complete:
+            to_remove.append(node)
+
+    for node in to_remove:
+        g.remove_node(node)
 
 
 def concept_to_node(input: BuildConcept) -> str:
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/author.py
RENAMED
@@ -271,6 +271,20 @@ class Conditional(Mergeable, ConceptArgs, Namespaced, DataTyped, BaseModel):
     right: Expr
     operator: BooleanOperator
 
+    @field_validator("left", mode="before")
+    @classmethod
+    def left_validator(cls, v, info: ValidationInfo):
+        if isinstance(v, Concept):
+            return v.reference
+        return v
+
+    @field_validator("right", mode="before")
+    @classmethod
+    def right_validator(cls, v, info: ValidationInfo):
+        if isinstance(v, Concept):
+            return v.reference
+        return v
+
     def __add__(self, other) -> "Conditional":
         if other is None:
             return self
@@ -346,7 +360,6 @@ class Conditional(Mergeable, ConceptArgs, Namespaced, DataTyped, BaseModel):
 
     @property
     def concept_arguments(self) -> Sequence[ConceptRef]:
-        """Return concepts directly referenced in where clause"""
         output = []
         output += get_concept_arguments(self.left)
         output += get_concept_arguments(self.right)
@@ -575,11 +588,11 @@ class Comparison(ConceptArgs, Mergeable, DataTyped, Namespaced, BaseModel):
             date,
             Function,
             ConceptRef,
-
+            Conditional,
             DataType,
-
+            Comparison,
             FunctionCallWrapper,
-
+            Parenthetical,
             MagicConstants,
             WindowItem,
             AggregateWrapper,
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/build.py
RENAMED
@@ -1631,7 +1631,10 @@ class Factory:
 
     @build.register
     def _(self, base: CaseElse) -> BuildCaseElse:
-
+        expr: Concept | FuncArgs = base.expr
+        if isinstance(expr, (AggregateWrapper, FilterItem, WindowItem)):
+            expr, _ = self.instantiate_concept(expr)
+        return BuildCaseElse.model_construct(expr=self.build(expr))
 
     @build.register
     def _(self, base: Concept) -> BuildConcept:
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/environment.py
RENAMED
@@ -603,7 +603,7 @@ class Environment(BaseModel):
             # too hacky for maintainability
             if current_derivation not in (Derivation.ROOT, Derivation.CONSTANT):
                 logger.info(
-                    f"A datasource has been added which will persist derived concept {new_persisted_concept.address}"
+                    f"A datasource has been added which will persist derived concept {new_persisted_concept.address} with derivation {current_derivation}"
                 )
             persisted = f"{PERSISTED_CONCEPT_PREFIX}_" + new_persisted_concept.name
             # override the current concept source to reflect that it's now coming from a datasource
@@ -622,17 +622,21 @@ class Environment(BaseModel):
                 meta=meta,
                 force=True,
             )
+            base = {
+                "lineage": None,
+                "metadata": new_persisted_concept.metadata.model_copy(
+                    update={"concept_source": ConceptSource.PERSIST_STATEMENT}
+                ),
+                "derivation": Derivation.ROOT,
+                "purpose": new_persisted_concept.purpose,
+            }
+            # purpose is used in derivation calculation
+            # which should be fixed, but we'll do in a followup
+            # so override here
+            if new_persisted_concept.purpose == Purpose.CONSTANT:
+                base["purpose"] = Purpose.KEY
             new_persisted_concept = new_persisted_concept.model_copy(
-                deep=True,
-                update={
-                    "lineage": None,
-                    "metadata": new_persisted_concept.metadata.model_copy(
-                        update={
-                            "concept_source": ConceptSource.PERSIST_STATEMENT
-                        }
-                    ),
-                    "derivation": Derivation.ROOT,
-                },
+                deep=True, update=base
             )
             self.add_concept(
                 new_persisted_concept,
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/models/execute.py
RENAMED
@@ -56,6 +56,12 @@ LOGGER_PREFIX = "[MODELS_EXECUTE]"
 DATASOURCE_TYPES = (BuildDatasource, BuildDatasource)
 
 
+class InlinedCTE(BaseModel):
+    original_alias: str
+    new_alias: str
+    new_base: str
+
+
 class CTE(BaseModel):
     name: str
     source: "QueryDatasource"
@@ -78,6 +84,7 @@ class CTE(BaseModel):
     limit: Optional[int] = None
     base_name_override: Optional[str] = None
     base_alias_override: Optional[str] = None
+    inlined_ctes: dict[str, InlinedCTE] = Field(default_factory=dict)
 
     @field_validator("join_derived_concepts")
     def validate_join_derived_concepts(cls, v):
@@ -104,62 +111,6 @@ class CTE(BaseModel):
     def validate_output_columns(cls, v):
         return unique(v, "address")
 
-    def inline_constant(self, concept: BuildConcept):
-        if not concept.derivation == Derivation.CONSTANT:
-            return False
-        if not isinstance(concept.lineage, BuildFunction):
-            return False
-        if not concept.lineage.operator == FunctionType.CONSTANT:
-            return False
-        # remove the constant
-        removed: set = set()
-        if concept.address in self.source_map:
-            removed = removed.union(self.source_map[concept.address])
-            del self.source_map[concept.address]
-
-        if self.condition:
-            self.condition = self.condition.inline_constant(concept)
-        # if we've entirely removed the need to join to someplace to get the concept
-        # drop the join as well.
-        for removed_cte in removed:
-            still_required = any(
-                [
-                    removed_cte in x
-                    for x in self.source_map.values()
-                    or self.existence_source_map.values()
-                ]
-            )
-            if not still_required:
-                self.joins = [
-                    join
-                    for join in self.joins
-                    if not isinstance(join, Join)
-                    or (
-                        isinstance(join, Join)
-                        and (
-                            join.right_cte.name != removed_cte
-                            and any(
-                                [
-                                    x.cte.name != removed_cte
-                                    for x in (join.joinkey_pairs or [])
-                                ]
-                            )
-                        )
-                    )
-                ]
-                for join in self.joins:
-                    if isinstance(join, UnnestJoin) and concept in join.concepts:
-                        join.rendering_required = False
-
-                self.parent_ctes = [
-                    x for x in self.parent_ctes if x.name != removed_cte
-                ]
-                if removed_cte == self.base_name_override:
-                    candidates = [x.name for x in self.parent_ctes]
-                    self.base_name_override = candidates[0] if candidates else None
-                    self.base_alias_override = candidates[0] if candidates else None
-        return True
-
     @property
     def comment(self) -> str:
         base = f"Target: {str(self.grain)}. Group: {self.group_to_grain}"
@@ -240,7 +191,18 @@ class CTE(BaseModel):
                 ]
             elif v == parent.safe_identifier:
                 self.source_map[k] = [ds_being_inlined.safe_identifier]
-
+        for k, v in self.existence_source_map.items():
+            if isinstance(v, list):
+                self.existence_source_map[k] = [
+                    (
+                        ds_being_inlined.safe_identifier
+                        if x == parent.safe_identifier
+                        else x
+                    )
+                    for x in v
+                ]
+            elif v == parent.safe_identifier:
+                self.existence_source_map[k] = [ds_being_inlined.safe_identifier]
         # zip in any required values for lookups
         for k in ds_being_inlined.output_lcl.addresses:
             if k in self.source_map and self.source_map[k]:
@@ -251,6 +213,11 @@ class CTE(BaseModel):
             ]
         if force_group:
             self.group_to_grain = True
+        self.inlined_ctes[ds_being_inlined.safe_identifier] = InlinedCTE(
+            original_alias=parent.name,
+            new_alias=ds_being_inlined.safe_identifier,
+            new_base=ds_being_inlined.safe_location,
+        )
         return True
 
     def __add__(self, other: "CTE" | "UnionCTE"):
@@ -303,6 +270,10 @@ class CTE(BaseModel):
             **self.existence_source_map,
             **other.existence_source_map,
         }
+        self.inlined_ctes = {
+            **self.inlined_ctes,
+            **other.inlined_ctes,
+        }
 
         return self
 
@@ -672,7 +643,7 @@ class QueryDatasource(BaseModel):
                 and CONFIG.validate_missing
             ):
                 raise SyntaxError(
-                    f"
+                    f"Missing source map entry for {concept.address} on {key} with pseudonyms {concept.pseudonyms}, have map: {v}"
                 )
         return v
 
@@ -1057,6 +1028,7 @@ class UnionCTE(BaseModel):
     hidden_concepts: set[str] = Field(default_factory=set)
     partial_concepts: list[BuildConcept] = Field(default_factory=list)
     existence_source_map: Dict[str, list[str]] = Field(default_factory=dict)
+    inlined_ctes: Dict[str, InlinedCTE] = Field(default_factory=dict)
 
     @computed_field  # type: ignore
     @property
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/node_merge_node.py
RENAMED
@@ -6,7 +6,7 @@ from networkx.algorithms import approximation as ax
 from trilogy.constants import logger
 from trilogy.core.enums import Derivation
 from trilogy.core.exceptions import AmbiguousRelationshipResolutionException
-from trilogy.core.graph_models import concept_to_node
+from trilogy.core.graph_models import concept_to_node, prune_sources_for_conditions
 from trilogy.core.models.build import BuildConcept, BuildConditional, BuildWhereClause
 from trilogy.core.models.build_environment import BuildEnvironment
 from trilogy.core.processing.nodes import History, MergeNode, StrategyNode
@@ -222,10 +222,12 @@ def resolve_weak_components(
     environment_graph: nx.DiGraph,
     filter_downstream: bool = True,
     accept_partial: bool = False,
+    search_conditions: BuildWhereClause | None = None,
 ) -> list[list[BuildConcept]] | None:
     break_flag = False
     found = []
     search_graph = environment_graph.copy()
+    prune_sources_for_conditions(search_graph, conditions=search_conditions)
     reduced_concept_sets: list[set[str]] = []
 
     # loop through, removing new nodes we find
@@ -406,6 +408,7 @@ def gen_merge_node(
         g,
         filter_downstream=filter_downstream,
         accept_partial=accept_partial,
+        search_conditions=search_conditions,
     )
     if not weak_resolve:
         logger.info(
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py
RENAMED
@@ -169,7 +169,9 @@ def is_fully_covered(
     return current_end >= end
 
 
-def get_union_sources(
+def get_union_sources(
+    datasources: list[BuildDatasource], concepts: list[BuildConcept]
+) -> List[list[BuildDatasource]]:
     candidates: list[BuildDatasource] = []
     for x in datasources:
         if all([c.address in x.output_concepts for c in concepts]):
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/processing/node_generators/select_merge_node.py
RENAMED
@@ -5,7 +5,11 @@ import networkx as nx
 
 from trilogy.constants import logger
 from trilogy.core.enums import Derivation
-from trilogy.core.graph_models import
+from trilogy.core.graph_models import (
+    concept_to_node,
+    get_graph_exact_match,
+    prune_sources_for_conditions,
+)
 from trilogy.core.models.build import (
     BuildConcept,
     BuildDatasource,
@@ -57,28 +61,6 @@ def get_graph_partial_nodes(
     return partial
 
 
-def get_graph_exact_match(
-    g: nx.DiGraph, conditions: BuildWhereClause | None
-) -> set[str]:
-    datasources: dict[str, BuildDatasource | list[BuildDatasource]] = (
-        nx.get_node_attributes(g, "datasource")
-    )
-    exact: set[str] = set()
-    for node in g.nodes:
-        if node in datasources:
-            ds = datasources[node]
-            if not isinstance(ds, list):
-                if not ds.non_partial_for:
-                    continue
-                if ds.non_partial_for and conditions == ds.non_partial_for:
-                    exact.add(node)
-                    continue
-            else:
-                continue
-
-    return exact
-
-
 def get_graph_grains(g: nx.DiGraph) -> dict[str, list[str]]:
     datasources: dict[str, BuildDatasource | list[BuildDatasource]] = (
         nx.get_node_attributes(g, "datasource")
@@ -98,28 +80,31 @@ def get_graph_grains(g: nx.DiGraph) -> dict[str, list[str]]:
 
 
 def subgraph_is_complete(
-    nodes: list[str], targets: set[str], mapping: dict[str, str]
+    nodes: list[str], targets: set[str], mapping: dict[str, str], g: nx.DiGraph
 ) -> bool:
     mapped = set([mapping.get(n, n) for n in nodes])
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        )
-
-
+    passed = all([t in mapped for t in targets])
+    if not passed:
+        logger.info(
+            f"Subgraph {nodes} is not complete, missing targets {targets} - mapped {mapped}"
+        )
+        return False
+    # check if all concepts have a datasource edge
+    has_ds_edge = {
+        mapping.get(n, n): any(x.startswith("ds~") for x in nx.neighbors(g, n))
+        for n in nodes
+        if n.startswith("c~")
+    }
+    has_ds_edge = {k: False for k in targets}
+    # check at least one instance of concept has a datasource edge
+    for n in nodes:
+        if n.startswith("c~"):
+            neighbors = nx.neighbors(g, n)
+            for neighbor in neighbors:
+                if neighbor.startswith("ds~"):
+                    has_ds_edge[mapping.get(n, n)] = True
+                    break
+    return all(has_ds_edge.values()) and passed
 
 
 def create_pruned_concept_graph(
@@ -133,8 +118,6 @@ def create_pruned_concept_graph(
     orig_g = g
 
     g = g.copy()
-    if conditions:
-        prune_sources_for_conditions(g, depth, conditions)
     union_options = get_union_sources(datasources, all_concepts)
     for ds_list in union_options:
         node_address = "ds~" + "-".join([x.name for x in ds_list])
@@ -144,7 +127,8 @@ def create_pruned_concept_graph(
         g.add_node(node_address, datasource=ds_list)
         for c in common:
             g.add_edge(node_address, concept_to_node(c))
-
+            g.add_edge(concept_to_node(c), node_address)
+    prune_sources_for_conditions(g, conditions)
     target_addresses = set([c.address for c in all_concepts])
     concepts: dict[str, BuildConcept] = nx.get_node_attributes(orig_g, "concept")
     datasource_map: dict[str, BuildDatasource | list[BuildDatasource]] = (
@@ -156,8 +140,7 @@ def create_pruned_concept_graph(
         # filter out synonyms
         if (x := concepts.get(n, None)) and x.address in target_addresses
     }
-
-    # GraphHook().query_graph_built(g)
+
     relevant_concepts: list[str] = list(relevant_concepts_pre.keys())
     relevent_datasets: list[str] = []
     if not accept_partial:
@@ -179,6 +162,7 @@ def create_pruned_concept_graph(
             to_remove.append(edge)
     for edge in to_remove:
         g.remove_edge(*edge)
+
     for n in g.nodes():
         if not n.startswith("ds~"):
             continue
@@ -211,13 +195,13 @@ def create_pruned_concept_graph(
            if n not in relevent_datasets and n not in relevant_concepts
        ]
    )
-
+    # from trilogy.hooks.graph_hook import GraphHook
+    # GraphHook().query_graph_built(g)
    subgraphs = list(nx.connected_components(g.to_undirected()))
-
    subgraphs = [
        s
        for s in subgraphs
-        if subgraph_is_complete(s, target_addresses, relevant_concepts_pre)
+        if subgraph_is_complete(s, target_addresses, relevant_concepts_pre, g)
    ]
 
    if not subgraphs:
@@ -524,8 +508,12 @@ def gen_select_merge_node(
    constants = [c for c in all_concepts if c.derivation == Derivation.CONSTANT]
    if not non_constant and constants:
        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} only constant inputs to discovery, returning constant node directly"
+            f"{padding(depth)}{LOGGER_PREFIX} only constant inputs to discovery ({constants}), returning constant node directly"
        )
+        for x in constants:
+            logger.info(
+                f"{padding(depth)}{LOGGER_PREFIX} {x} {x.lineage} {x.derivation}"
+            )
    if conditions:
        if not all(
            [x.derivation == Derivation.CONSTANT for x in conditions.row_arguments]
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/core/query_processor.py
RENAMED
@@ -395,6 +395,9 @@ def get_query_node(
     if not statement.output_components:
         raise ValueError(f"Statement has no output components {statement}")
     history = history or History(base_environment=environment)
+    print(
+        f"{LOGGER_PREFIX} building query node for {statement.output_components} grain {statement.grain}"
+    )
     build_statement: BuildSelectLineage | BuildMultiSelectLineage = Factory(
         environment=environment,
     ).build(statement)
{pytrilogy-0.0.3.64 → pytrilogy-0.0.3.66}/trilogy/dialect/base.py
RENAMED
@@ -580,7 +580,12 @@ class BaseDialect:
                 target = INVALID_REFERENCE_STRING(
                     f"Missing source CTE for {e.right.address}"
                 )
+            assert cte, "CTE must be provided for inlined CTEs"
+            if target in cte.inlined_ctes:
+                info = cte.inlined_ctes[target]
+                return f"{self.render_expr(e.left, cte=cte, cte_map=cte_map, raise_invalid=raise_invalid)} {e.operator.value} (select {target}.{self.QUOTE_CHARACTER}{e.right.safe_address}{self.QUOTE_CHARACTER} from {info.new_base} as {target} where {target}.{self.QUOTE_CHARACTER}{e.right.safe_address}{self.QUOTE_CHARACTER} is not null)"
             return f"{self.render_expr(e.left, cte=cte, cte_map=cte_map, raise_invalid=raise_invalid)} {e.operator.value} (select {target}.{self.QUOTE_CHARACTER}{e.right.safe_address}{self.QUOTE_CHARACTER} from {target} where {target}.{self.QUOTE_CHARACTER}{e.right.safe_address}{self.QUOTE_CHARACTER} is not null)"
+
         elif isinstance(
             e.right,
             (ListWrapper, TupleWrapper, BuildParenthetical, list),