pytrilogy 0.0.3.103__tar.gz → 0.0.3.104__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pytrilogy might be problematic. Click here for more details.
- {pytrilogy-0.0.3.103/pytrilogy.egg-info → pytrilogy-0.0.3.104}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/SOURCES.txt +1 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_partial_handling.py +2 -2
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/constants.py +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/execute.py +1 -6
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/predicate_pushdown.py +9 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/concept_strategies_v3.py +35 -14
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/discovery_node_factory.py +6 -1
- pytrilogy-0.0.3.104/trilogy/core/processing/discovery_utility.py +294 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/basic_node.py +1 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/common.py +1 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/filter_node.py +0 -10
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/group_node.py +36 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/multiselect_node.py +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/node_merge_node.py +2 -6
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/rowset_node.py +1 -1
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/base_node.py +13 -2
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/group_node.py +9 -91
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/merge_node.py +9 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/utility.py +8 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/base.py +3 -0
- pytrilogy-0.0.3.104/trilogy/std/color.preql +3 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/display.preql +3 -3
- pytrilogy-0.0.3.103/trilogy/core/processing/discovery_utility.py +0 -145
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/LICENSE.md +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/README.md +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pyproject.toml +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/setup.cfg +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/setup.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_execute_models.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_failure.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_metadata.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_models.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_parsing_failures.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_query_render.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_select.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_show.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_typing.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_user_functions.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_validators.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/authoring/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/enums.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/functions.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/author.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/build.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/build_environment.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/core.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/datasource.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/models/environment.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/hide_unused_concept.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/discovery_validation.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/constant_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/union_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/recursive_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/union_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/query_processor.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/statements/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/statements/author.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/statements/build.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/statements/common.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/statements/execute.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/utility.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/common.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/concept.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/datasource.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/environment.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/validation/fix.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/dataframe.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/metadata.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/common.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/parse_engine.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/render.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/__init__.py +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/date.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/geography.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/metric.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/money.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/net.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/ranking.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/std/report.preql +0 -0
- {pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/utility.py +0 -0
|
@@ -2,7 +2,7 @@ from sqlalchemy import create_engine
|
|
|
2
2
|
|
|
3
3
|
from trilogy import Dialects, Executor
|
|
4
4
|
from trilogy.core.enums import Purpose
|
|
5
|
-
from trilogy.core.models.author import Concept
|
|
5
|
+
from trilogy.core.models.author import Concept, Grain
|
|
6
6
|
from trilogy.core.models.core import (
|
|
7
7
|
DataType,
|
|
8
8
|
)
|
|
@@ -90,6 +90,7 @@ def setup_titanic(env: Environment):
|
|
|
90
90
|
ColumnAssignment(alias="name", concept=name),
|
|
91
91
|
ColumnAssignment(alias="fare", concept=fare),
|
|
92
92
|
],
|
|
93
|
+
grain=Grain(components=[id.address]),
|
|
93
94
|
),
|
|
94
95
|
)
|
|
95
96
|
return env
|
|
@@ -140,7 +141,6 @@ def test_partial_assignment():
|
|
|
140
141
|
depth=0,
|
|
141
142
|
)
|
|
142
143
|
assert isinstance(sourced, FilterNode)
|
|
143
|
-
assert len(sourced.parents) == 1
|
|
144
144
|
|
|
145
145
|
|
|
146
146
|
def test_filter_query():
|
|
@@ -711,8 +711,6 @@ class QueryDatasource(BaseModel):
|
|
|
711
711
|
f" {[c.address for c in self.output_concepts]} concepts and"
|
|
712
712
|
f" {other.name} with {[c.address for c in other.output_concepts]} concepts"
|
|
713
713
|
)
|
|
714
|
-
logger.info(self.source_map)
|
|
715
|
-
logger.info(other.source_map)
|
|
716
714
|
|
|
717
715
|
merged_datasources: dict[str, Union[BuildDatasource, "QueryDatasource"]] = {}
|
|
718
716
|
|
|
@@ -816,10 +814,7 @@ class QueryDatasource(BaseModel):
|
|
|
816
814
|
use_raw_name,
|
|
817
815
|
force_alias=force_alias,
|
|
818
816
|
)
|
|
819
|
-
except ValueError
|
|
820
|
-
from trilogy.constants import logger
|
|
821
|
-
|
|
822
|
-
logger.debug(e)
|
|
817
|
+
except ValueError:
|
|
823
818
|
continue
|
|
824
819
|
existing = [c.with_grain(self.grain) for c in self.output_concepts]
|
|
825
820
|
if concept in existing:
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/optimizations/predicate_pushdown.py
RENAMED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
from trilogy.core.enums import (
|
|
2
2
|
BooleanOperator,
|
|
3
|
+
SourceType,
|
|
3
4
|
)
|
|
4
5
|
from trilogy.core.models.build import (
|
|
5
6
|
BuildComparison,
|
|
@@ -59,12 +60,19 @@ class PredicatePushdown(OptimizationRule):
|
|
|
59
60
|
)
|
|
60
61
|
return False
|
|
61
62
|
materialized = {k for k, v in parent_cte.source_map.items() if v != []}
|
|
63
|
+
|
|
62
64
|
if not row_conditions or not materialized:
|
|
63
65
|
return False
|
|
64
66
|
output_addresses = {x.address for x in parent_cte.output_columns}
|
|
65
67
|
# if any of the existence conditions are created on the asset, we can't push up to it
|
|
66
68
|
if existence_conditions and existence_conditions.intersection(output_addresses):
|
|
67
69
|
return False
|
|
70
|
+
if existence_conditions:
|
|
71
|
+
self.log(
|
|
72
|
+
f"Not pushing up existence {candidate} to {parent_cte.name} as it is a filter node"
|
|
73
|
+
)
|
|
74
|
+
if parent_cte.source.source_type == SourceType.FILTER:
|
|
75
|
+
return False
|
|
68
76
|
# if it's a root datasource, we can filter on _any_ of the output concepts
|
|
69
77
|
if parent_cte.is_root_datasource:
|
|
70
78
|
extra_check = {
|
|
@@ -81,7 +89,7 @@ class PredicatePushdown(OptimizationRule):
|
|
|
81
89
|
children = inverse_map.get(parent_cte.name, [])
|
|
82
90
|
if all([is_child_of(candidate, child.condition) for child in children]):
|
|
83
91
|
self.log(
|
|
84
|
-
f"All concepts
|
|
92
|
+
f"All concepts [{row_conditions}] and existence conditions [{existence_conditions}] not block pushup of [{output_addresses}]found on {parent_cte.name} with existing {parent_cte.condition} and all it's {len(children)} children include same filter; pushing up {candidate}"
|
|
85
93
|
)
|
|
86
94
|
if parent_cte.condition and not is_scalar_condition(
|
|
87
95
|
parent_cte.condition
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/concept_strategies_v3.py
RENAMED
|
@@ -19,7 +19,7 @@ from trilogy.core.processing.discovery_utility import (
|
|
|
19
19
|
LOGGER_PREFIX,
|
|
20
20
|
depth_to_prefix,
|
|
21
21
|
get_priority_concept,
|
|
22
|
-
|
|
22
|
+
group_if_required_v2,
|
|
23
23
|
)
|
|
24
24
|
from trilogy.core.processing.discovery_validation import (
|
|
25
25
|
ValidationResult,
|
|
@@ -66,7 +66,19 @@ def generate_candidates_restrictive(
|
|
|
66
66
|
|
|
67
67
|
# if it's single row, joins are irrelevant. Fetch without keys.
|
|
68
68
|
if priority_concept.granularity == Granularity.SINGLE_ROW:
|
|
69
|
-
|
|
69
|
+
logger.info("Have single row concept, including only other single row optional")
|
|
70
|
+
optional = (
|
|
71
|
+
[
|
|
72
|
+
x
|
|
73
|
+
for x in candidates
|
|
74
|
+
if x.granularity == Granularity.SINGLE_ROW
|
|
75
|
+
and x.address not in priority_concept.pseudonyms
|
|
76
|
+
and priority_concept.address not in x.pseudonyms
|
|
77
|
+
]
|
|
78
|
+
if priority_concept.derivation == Derivation.AGGREGATE
|
|
79
|
+
else []
|
|
80
|
+
)
|
|
81
|
+
return optional, conditions
|
|
70
82
|
|
|
71
83
|
if conditions and priority_concept.derivation in ROOT_DERIVATIONS:
|
|
72
84
|
logger.info(
|
|
@@ -374,15 +386,21 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
|
|
|
374
386
|
logger.info(
|
|
375
387
|
f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values by setting outputs to {[x.address for x in output.output_concepts if x.address in non_virtual_output]}"
|
|
376
388
|
)
|
|
377
|
-
output.set_output_concepts(
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
389
|
+
# output.set_output_concepts(
|
|
390
|
+
# [
|
|
391
|
+
# x
|
|
392
|
+
# for x in output.output_concepts
|
|
393
|
+
# if x.address not in non_virtual_difference_values
|
|
394
|
+
# or any(c in non_virtual_output for c in x.pseudonyms)
|
|
395
|
+
# ],
|
|
396
|
+
# rebuild=True,
|
|
397
|
+
# change_visibility=False
|
|
398
|
+
# )
|
|
399
|
+
# output.set_output_concepts(context.original_mandatory)
|
|
400
|
+
|
|
401
|
+
# if isinstance(output, MergeNode):
|
|
402
|
+
# output.force_group = True
|
|
403
|
+
# output.rebuild_cache()
|
|
386
404
|
|
|
387
405
|
logger.info(
|
|
388
406
|
f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Source stack has single node, returning that {type(output)}"
|
|
@@ -416,14 +434,17 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
|
|
|
416
434
|
logger.info(
|
|
417
435
|
f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node output {[x.address for x in output.usable_outputs]} partial {[c.address for c in output.partial_concepts or []]} with {context.conditions}"
|
|
418
436
|
)
|
|
437
|
+
from trilogy.core.processing.discovery_utility import group_if_required_v2
|
|
438
|
+
|
|
419
439
|
if condition_required and context.conditions and non_virtual_different:
|
|
420
440
|
logger.info(
|
|
421
441
|
f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
|
|
422
442
|
)
|
|
423
|
-
return
|
|
443
|
+
return group_if_required_v2(
|
|
424
444
|
output, context.original_mandatory, context.environment
|
|
425
445
|
)
|
|
426
|
-
|
|
446
|
+
|
|
447
|
+
return group_if_required_v2(output, context.original_mandatory, context.environment)
|
|
427
448
|
|
|
428
449
|
|
|
429
450
|
def _search_concepts(
|
|
@@ -588,4 +609,4 @@ def source_query_concepts(
|
|
|
588
609
|
logger.info(
|
|
589
610
|
f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
|
|
590
611
|
)
|
|
591
|
-
return
|
|
612
|
+
return group_if_required_v2(root, output_concepts, environment)
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/discovery_node_factory.py
RENAMED
|
@@ -177,7 +177,12 @@ def _generate_union_node(ctx: NodeGenerationContext) -> StrategyNode | None:
|
|
|
177
177
|
def _generate_aggregate_node(ctx: NodeGenerationContext) -> StrategyNode | None:
|
|
178
178
|
# Filter out constants to avoid multiplication issues
|
|
179
179
|
agg_optional = [
|
|
180
|
-
x
|
|
180
|
+
x
|
|
181
|
+
for x in ctx.local_optional
|
|
182
|
+
if not (
|
|
183
|
+
x.granularity == Granularity.SINGLE_ROW
|
|
184
|
+
and x.derivation != Derivation.AGGREGATE
|
|
185
|
+
)
|
|
181
186
|
]
|
|
182
187
|
|
|
183
188
|
logger.info(
|
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
from trilogy.constants import logger
|
|
4
|
+
from trilogy.core.enums import Derivation, Purpose
|
|
5
|
+
from trilogy.core.models.build import (
|
|
6
|
+
BuildConcept,
|
|
7
|
+
BuildDatasource,
|
|
8
|
+
BuildFilterItem,
|
|
9
|
+
BuildGrain,
|
|
10
|
+
BuildRowsetItem,
|
|
11
|
+
)
|
|
12
|
+
from trilogy.core.models.build_environment import BuildEnvironment
|
|
13
|
+
from trilogy.core.models.execute import QueryDatasource, UnnestJoin
|
|
14
|
+
from trilogy.core.processing.nodes import GroupNode, MergeNode, StrategyNode
|
|
15
|
+
from trilogy.core.processing.utility import GroupRequiredResponse
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def depth_to_prefix(depth: int) -> str:
|
|
19
|
+
return "\t" * depth
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
LOGGER_PREFIX = "[DISCOVERY LOOP]"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def calculate_effective_parent_grain(
|
|
26
|
+
node: QueryDatasource | BuildDatasource,
|
|
27
|
+
) -> BuildGrain:
|
|
28
|
+
# calculate the effective grain of the parent node
|
|
29
|
+
# this is the union of all parent grains
|
|
30
|
+
if isinstance(node, MergeNode):
|
|
31
|
+
grain = BuildGrain()
|
|
32
|
+
qds = node.resolve()
|
|
33
|
+
if not qds.joins:
|
|
34
|
+
return qds.datasources[0].grain
|
|
35
|
+
for join in qds.joins:
|
|
36
|
+
if isinstance(join, UnnestJoin):
|
|
37
|
+
continue
|
|
38
|
+
pairs = join.concept_pairs or []
|
|
39
|
+
for key in pairs:
|
|
40
|
+
left = key.existing_datasource
|
|
41
|
+
grain += left.grain
|
|
42
|
+
keys = [key.right for key in pairs]
|
|
43
|
+
join_grain = BuildGrain.from_concepts(keys)
|
|
44
|
+
if join_grain == join.right_datasource.grain:
|
|
45
|
+
logger.info(f"irrelevant right join {join}, does not change grain")
|
|
46
|
+
else:
|
|
47
|
+
logger.info(
|
|
48
|
+
f"join changes grain, adding {join.right_datasource.grain} to {grain}"
|
|
49
|
+
)
|
|
50
|
+
grain += join.right_datasource.grain
|
|
51
|
+
return grain
|
|
52
|
+
else:
|
|
53
|
+
return node.grain or BuildGrain()
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def check_if_group_required(
|
|
57
|
+
downstream_concepts: List[BuildConcept],
|
|
58
|
+
parents: list[QueryDatasource | BuildDatasource],
|
|
59
|
+
environment: BuildEnvironment,
|
|
60
|
+
depth: int = 0,
|
|
61
|
+
) -> GroupRequiredResponse:
|
|
62
|
+
padding = "\t" * depth
|
|
63
|
+
target_grain = BuildGrain.from_concepts(
|
|
64
|
+
downstream_concepts,
|
|
65
|
+
environment=environment,
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
comp_grain = BuildGrain()
|
|
69
|
+
for source in parents:
|
|
70
|
+
# comp_grain += source.grain
|
|
71
|
+
comp_grain += calculate_effective_parent_grain(source)
|
|
72
|
+
|
|
73
|
+
# dynamically select if we need to group
|
|
74
|
+
# we must avoid grouping if we are already at grain
|
|
75
|
+
if comp_grain.issubset(target_grain):
|
|
76
|
+
|
|
77
|
+
logger.info(
|
|
78
|
+
f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, target: {target_grain}, grain is subset of target, no group node required"
|
|
79
|
+
)
|
|
80
|
+
return GroupRequiredResponse(target_grain, comp_grain, False)
|
|
81
|
+
# find out what extra is in the comp grain vs target grain
|
|
82
|
+
difference = [
|
|
83
|
+
environment.concepts[c] for c in (comp_grain - target_grain).components
|
|
84
|
+
]
|
|
85
|
+
logger.info(
|
|
86
|
+
f"{padding}{LOGGER_PREFIX} Group requirement check: upstream grain: {comp_grain}, desired grain: {target_grain} from , difference {[x.address for x in difference]}"
|
|
87
|
+
)
|
|
88
|
+
for x in difference:
|
|
89
|
+
logger.info(
|
|
90
|
+
f"{padding}{LOGGER_PREFIX} Difference concept {x.address} purpose {x.purpose} keys {x.keys}"
|
|
91
|
+
)
|
|
92
|
+
|
|
93
|
+
# if the difference is all unique properties whose keys are in the source grain
|
|
94
|
+
# we can also suppress the group
|
|
95
|
+
if all(
|
|
96
|
+
[
|
|
97
|
+
x.keys
|
|
98
|
+
and all(
|
|
99
|
+
environment.concepts[z].address in comp_grain.components for z in x.keys
|
|
100
|
+
)
|
|
101
|
+
for x in difference
|
|
102
|
+
]
|
|
103
|
+
):
|
|
104
|
+
logger.info(
|
|
105
|
+
f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
|
|
106
|
+
)
|
|
107
|
+
return GroupRequiredResponse(target_grain, comp_grain, False)
|
|
108
|
+
if all([x.purpose == Purpose.KEY for x in difference]):
|
|
109
|
+
logger.info(
|
|
110
|
+
f"{padding}{LOGGER_PREFIX} checking if downstream is unique properties of key"
|
|
111
|
+
)
|
|
112
|
+
replaced_grain_raw: list[set[str]] = [
|
|
113
|
+
(
|
|
114
|
+
x.keys or set()
|
|
115
|
+
if x.purpose == Purpose.UNIQUE_PROPERTY
|
|
116
|
+
else set([x.address])
|
|
117
|
+
)
|
|
118
|
+
for x in downstream_concepts
|
|
119
|
+
if x.address in target_grain.components
|
|
120
|
+
]
|
|
121
|
+
# flatten the list of lists
|
|
122
|
+
replaced_grain = [item for sublist in replaced_grain_raw for item in sublist]
|
|
123
|
+
# if the replaced grain is a subset of the comp grain, we can skip the group
|
|
124
|
+
unique_grain_comp = BuildGrain.from_concepts(
|
|
125
|
+
replaced_grain, environment=environment
|
|
126
|
+
)
|
|
127
|
+
if comp_grain.issubset(unique_grain_comp):
|
|
128
|
+
logger.info(
|
|
129
|
+
f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
|
|
130
|
+
)
|
|
131
|
+
return GroupRequiredResponse(target_grain, comp_grain, False)
|
|
132
|
+
logger.info(
|
|
133
|
+
f"{padding}{LOGGER_PREFIX} Checking for grain equivalence for filters and rowsets"
|
|
134
|
+
)
|
|
135
|
+
ngrain = []
|
|
136
|
+
for con in target_grain.components:
|
|
137
|
+
full = environment.concepts[con]
|
|
138
|
+
if full.derivation == Derivation.ROWSET:
|
|
139
|
+
ngrain.append(full.address.split(".", 1)[1])
|
|
140
|
+
elif full.derivation == Derivation.FILTER:
|
|
141
|
+
assert isinstance(full.lineage, BuildFilterItem)
|
|
142
|
+
if isinstance(full.lineage.content, BuildConcept):
|
|
143
|
+
ngrain.append(full.lineage.content.address)
|
|
144
|
+
else:
|
|
145
|
+
ngrain.append(full.address)
|
|
146
|
+
target_grain2 = BuildGrain.from_concepts(
|
|
147
|
+
ngrain,
|
|
148
|
+
environment=environment,
|
|
149
|
+
)
|
|
150
|
+
if comp_grain.issubset(target_grain2):
|
|
151
|
+
logger.info(
|
|
152
|
+
f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain2}, pre rowset grain is subset of target, no group node required"
|
|
153
|
+
)
|
|
154
|
+
return GroupRequiredResponse(target_grain2, comp_grain, False)
|
|
155
|
+
|
|
156
|
+
logger.info(f"{padding}{LOGGER_PREFIX} Group requirement check: group required")
|
|
157
|
+
return GroupRequiredResponse(target_grain, comp_grain, True)
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def group_if_required_v2(
|
|
161
|
+
root: StrategyNode, final: List[BuildConcept], environment: BuildEnvironment
|
|
162
|
+
):
|
|
163
|
+
required = check_if_group_required(
|
|
164
|
+
downstream_concepts=final, parents=[root.resolve()], environment=environment
|
|
165
|
+
)
|
|
166
|
+
targets = [
|
|
167
|
+
x
|
|
168
|
+
for x in root.output_concepts
|
|
169
|
+
if x.address in final or any(c in final for c in x.pseudonyms)
|
|
170
|
+
]
|
|
171
|
+
if required.required:
|
|
172
|
+
if isinstance(root, MergeNode):
|
|
173
|
+
root.force_group = True
|
|
174
|
+
root.set_output_concepts(targets, rebuild=False, change_visibility=False)
|
|
175
|
+
root.rebuild_cache()
|
|
176
|
+
return root
|
|
177
|
+
elif isinstance(root, GroupNode):
|
|
178
|
+
# root.set_output_concepts(final, rebuild=False)
|
|
179
|
+
# root.rebuild_cache()
|
|
180
|
+
return root
|
|
181
|
+
return GroupNode(
|
|
182
|
+
output_concepts=targets,
|
|
183
|
+
input_concepts=targets,
|
|
184
|
+
environment=environment,
|
|
185
|
+
parents=[root],
|
|
186
|
+
partial_concepts=root.partial_concepts,
|
|
187
|
+
preexisting_conditions=root.preexisting_conditions,
|
|
188
|
+
)
|
|
189
|
+
elif isinstance(root, GroupNode):
|
|
190
|
+
|
|
191
|
+
return root
|
|
192
|
+
else:
|
|
193
|
+
root.set_output_concepts(targets, rebuild=False, change_visibility=False)
|
|
194
|
+
return root
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def get_upstream_concepts(base: BuildConcept, nested: bool = False) -> set[str]:
|
|
198
|
+
upstream = set()
|
|
199
|
+
if nested:
|
|
200
|
+
upstream.add(base.address)
|
|
201
|
+
if not base.lineage:
|
|
202
|
+
return upstream
|
|
203
|
+
for x in base.lineage.concept_arguments:
|
|
204
|
+
# if it's derived from any value in a rowset, ALL rowset items are upstream
|
|
205
|
+
if x.derivation == Derivation.ROWSET:
|
|
206
|
+
assert isinstance(x.lineage, BuildRowsetItem), type(x.lineage)
|
|
207
|
+
for y in x.lineage.rowset.select.output_components:
|
|
208
|
+
upstream.add(f"{x.lineage.rowset.name}.{y.address}")
|
|
209
|
+
# upstream = upstream.union(get_upstream_concepts(y, nested=True))
|
|
210
|
+
upstream = upstream.union(get_upstream_concepts(x, nested=True))
|
|
211
|
+
return upstream
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def get_priority_concept(
    all_concepts: List[BuildConcept],
    attempted_addresses: set[str],
    found_concepts: set[str],
    depth: int,
) -> BuildConcept:
    """Pick the next concept to attempt to resolve.

    Candidates (concepts neither attempted nor already found) are ordered by
    derivation type — complex derivations (multiselects, rowsets, unions,
    aggregates, windows, filters, ...) first, since they cannot tolerate
    partials, with plain ROOT selects last. Any concept that is upstream of
    another candidate is deferred, because resolving the derived copy first
    usually resolves more cleanly.

    :param all_concepts: pool of concepts still needed
    :param attempted_addresses: addresses already attempted this search
    :param found_concepts: addresses already satisfied
    :param depth: current search depth, used only for log indentation
    :return: the single highest-priority concept to attempt next
    :raises ValueError: when every candidate has already been attempted
    """
    # optimized search for missing concepts; sorted for deterministic ordering
    remaining = sorted(
        (
            c
            for c in all_concepts
            if c.address not in attempted_addresses and c.address not in found_concepts
        ),
        key=lambda x: x.address,
    )
    # resolution order by derivation type: derivations that cannot get
    # partials come first, plain ROOT selects (and any non-single-row
    # constants) last; unrecognized derivations trail the whole list.
    derivation_order = (
        Derivation.MULTISELECT,
        Derivation.ROWSET,
        Derivation.UNION,
        Derivation.AGGREGATE,
        Derivation.WINDOW,
        Derivation.FILTER,
        Derivation.UNNEST,
        Derivation.RECURSIVE,
        Derivation.BASIC,
        Derivation.GROUP_TO,
        Derivation.CONSTANT,
        Derivation.ROOT,
    )
    # single-pass bucketing instead of one full list scan per derivation type
    buckets: dict = {d: [] for d in derivation_order}
    trailing: list = []
    for c in remaining:
        if c.derivation in buckets:
            buckets[c.derivation].append(c)
        else:
            trailing.append(c)
    priority = [c for d in derivation_order for c in buckets[d]]
    # append anything with an unlisted derivation, deduplicated by address
    # against what is already prioritized (set lookup, not a rebuilt list)
    prioritized_addresses = {c.address for c in priority}
    priority += [c for c in trailing if c.address not in prioritized_addresses]

    # if anything is derived from another candidate, fetch the derived copy
    # first, as this will usually resolve cleaner. Compute the union of all
    # upstream sets once instead of re-walking every lineage per candidate.
    upstream_of_any: set[str] = set()
    for c in priority:
        upstream_of_any |= get_upstream_concepts(c)

    final: list = []
    delayed: list = []
    for x in priority:
        if x.address in upstream_of_any:
            logger.info(
                f"{depth_to_prefix(depth)}{LOGGER_PREFIX} delaying fetch of {x.address} as parent of another concept"
            )
            delayed.append(x)
        else:
            final.append(x)
    # delayed concepts still trail the list so they are eventually attempted
    final.extend(delayed)
    if final:
        return final[0]
    raise ValueError(
        f"Cannot resolve query. No remaining priority concepts, have attempted {attempted_addresses}"
    )
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/basic_node.py
RENAMED
|
@@ -143,4 +143,5 @@ def gen_basic_node(
|
|
|
143
143
|
logger.info(
|
|
144
144
|
f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: input: {[x.address for x in parent_node.input_concepts]} output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
|
|
145
145
|
)
|
|
146
|
+
|
|
146
147
|
return parent_node
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/filter_node.py
RENAMED
|
@@ -246,13 +246,6 @@ def gen_filter_node(
|
|
|
246
246
|
filter_node = parent
|
|
247
247
|
else:
|
|
248
248
|
core_parent_nodes.append(row_parent)
|
|
249
|
-
filters = [concept] + same_filter_optional
|
|
250
|
-
parents_for_grain = [
|
|
251
|
-
x.lineage.content
|
|
252
|
-
for x in filters
|
|
253
|
-
if isinstance(x.lineage, BuildFilterItem)
|
|
254
|
-
and isinstance(x.lineage.content, BuildConcept)
|
|
255
|
-
]
|
|
256
249
|
filter_node = FilterNode(
|
|
257
250
|
input_concepts=unique(
|
|
258
251
|
parent_row_concepts + flattened_existence,
|
|
@@ -261,9 +254,6 @@ def gen_filter_node(
|
|
|
261
254
|
output_concepts=[concept] + same_filter_optional + parent_row_concepts,
|
|
262
255
|
environment=environment,
|
|
263
256
|
parents=core_parent_nodes,
|
|
264
|
-
grain=BuildGrain.from_concepts(
|
|
265
|
-
parents_for_grain + parent_row_concepts, environment=environment
|
|
266
|
-
),
|
|
267
257
|
preexisting_conditions=conditions.conditional if conditions else None,
|
|
268
258
|
)
|
|
269
259
|
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/group_node.py
RENAMED
|
@@ -108,6 +108,42 @@ def gen_group_node(
|
|
|
108
108
|
logger.info(
|
|
109
109
|
f"{padding(depth)}{LOGGER_PREFIX} cannot include optional agg {possible_agg.address}; it has mismatched parent grain {comp_grain } vs local parent {build_grain_parents}"
|
|
110
110
|
)
|
|
111
|
+
elif concept.grain.abstract:
|
|
112
|
+
for possible_agg in local_optional:
|
|
113
|
+
if not isinstance(
|
|
114
|
+
possible_agg.lineage,
|
|
115
|
+
(BuildAggregateWrapper, BuildFunction),
|
|
116
|
+
):
|
|
117
|
+
|
|
118
|
+
continue
|
|
119
|
+
logger.info(
|
|
120
|
+
f"{padding(depth)}{LOGGER_PREFIX} considering optional agg {possible_agg.address} for {concept.address}"
|
|
121
|
+
)
|
|
122
|
+
agg_parents = resolve_function_parent_concepts(
|
|
123
|
+
possible_agg,
|
|
124
|
+
environment=environment,
|
|
125
|
+
)
|
|
126
|
+
comp_grain = get_aggregate_grain(possible_agg, environment)
|
|
127
|
+
if not possible_agg.grain.abstract:
|
|
128
|
+
continue
|
|
129
|
+
if set([x.address for x in agg_parents]).issubset(
|
|
130
|
+
set([x.address for x in parent_concepts])
|
|
131
|
+
):
|
|
132
|
+
output_concepts.append(possible_agg)
|
|
133
|
+
logger.info(
|
|
134
|
+
f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
|
|
135
|
+
)
|
|
136
|
+
elif comp_grain == get_aggregate_grain(concept, environment):
|
|
137
|
+
extra = [x for x in agg_parents if x.address not in parent_concepts]
|
|
138
|
+
parent_concepts += extra
|
|
139
|
+
output_concepts.append(possible_agg)
|
|
140
|
+
logger.info(
|
|
141
|
+
f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
|
|
142
|
+
)
|
|
143
|
+
else:
|
|
144
|
+
logger.info(
|
|
145
|
+
f"{padding(depth)}{LOGGER_PREFIX} cannot include optional agg {possible_agg.address}; it has mismatched parent grain {comp_grain } vs local parent {get_aggregate_grain(concept, environment)}"
|
|
146
|
+
)
|
|
111
147
|
if parent_concepts:
|
|
112
148
|
target_grain = BuildGrain.from_concepts(parent_concepts)
|
|
113
149
|
logger.info(
|
|
@@ -156,7 +156,7 @@ def gen_multiselect_node(
|
|
|
156
156
|
possible_joins = concept_to_relevant_joins(additional_relevant)
|
|
157
157
|
if not local_optional:
|
|
158
158
|
logger.info(
|
|
159
|
-
f"{padding(depth)}{LOGGER_PREFIX} no
|
|
159
|
+
f"{padding(depth)}{LOGGER_PREFIX} no enrichment required for rowset node; exiting early"
|
|
160
160
|
)
|
|
161
161
|
return node
|
|
162
162
|
if not possible_joins:
|
|
@@ -20,9 +20,6 @@ from trilogy.core.models.build import (
|
|
|
20
20
|
BuildWhereClause,
|
|
21
21
|
)
|
|
22
22
|
from trilogy.core.models.build_environment import BuildEnvironment
|
|
23
|
-
from trilogy.core.processing.discovery_utility import (
|
|
24
|
-
group_if_required,
|
|
25
|
-
)
|
|
26
23
|
from trilogy.core.processing.nodes import History, MergeNode, StrategyNode
|
|
27
24
|
from trilogy.core.processing.utility import padding
|
|
28
25
|
from trilogy.utility import unique
|
|
@@ -510,7 +507,7 @@ def subgraphs_to_merge_node(
|
|
|
510
507
|
search_conditions: BuildWhereClause | None = None,
|
|
511
508
|
enable_early_exit: bool = True,
|
|
512
509
|
):
|
|
513
|
-
|
|
510
|
+
|
|
514
511
|
parents: List[StrategyNode] = []
|
|
515
512
|
logger.info(
|
|
516
513
|
f"{padding(depth)}{LOGGER_PREFIX} fetching subgraphs {[[c.address for c in subgraph] for subgraph in concept_subgraphs]}"
|
|
@@ -555,7 +552,7 @@ def subgraphs_to_merge_node(
|
|
|
555
552
|
f"{padding(depth)}{LOGGER_PREFIX} only one parent node, exiting early w/ {[c.address for c in parents[0].output_concepts]}"
|
|
556
553
|
)
|
|
557
554
|
parent = parents[0]
|
|
558
|
-
return
|
|
555
|
+
return parent
|
|
559
556
|
|
|
560
557
|
rval = MergeNode(
|
|
561
558
|
input_concepts=unique(input_c, "address"),
|
|
@@ -563,7 +560,6 @@ def subgraphs_to_merge_node(
|
|
|
563
560
|
environment=environment,
|
|
564
561
|
parents=parents,
|
|
565
562
|
depth=depth,
|
|
566
|
-
grain=target_grain,
|
|
567
563
|
# hidden_concepts=[]
|
|
568
564
|
# conditions=conditions,
|
|
569
565
|
# conditions=search_conditions.conditional,
|
{pytrilogy-0.0.3.103 → pytrilogy-0.0.3.104}/trilogy/core/processing/node_generators/rowset_node.py
RENAMED
|
@@ -92,7 +92,7 @@ def gen_rowset_node(
|
|
|
92
92
|
|
|
93
93
|
node.rebuild_cache()
|
|
94
94
|
logger.info(
|
|
95
|
-
f"{padding(depth)}{LOGGER_PREFIX} final output is {[x.address for x in node.output_concepts]}"
|
|
95
|
+
f"{padding(depth)}{LOGGER_PREFIX} final output is {[x.address for x in node.output_concepts]} with grain {node.grain}"
|
|
96
96
|
)
|
|
97
97
|
if not local_optional or all(
|
|
98
98
|
(
|