pytrilogy 0.0.2.55__tar.gz → 0.0.2.57__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pytrilogy might be problematic.
- {pytrilogy-0.0.2.55/pytrilogy.egg-info → pytrilogy-0.0.2.57}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_models.py +4 -2
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_select.py +33 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/environment_helpers.py +16 -5
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/functions.py +2 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/models.py +116 -16
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/concept_strategies_v3.py +21 -14
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/basic_node.py +4 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/filter_node.py +5 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/group_to_node.py +10 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/multiselect_node.py +16 -18
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/rowset_node.py +1 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/select_merge_node.py +8 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/base_node.py +13 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/group_node.py +37 -40
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/merge_node.py +1 -3
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/utility.py +44 -14
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/query_processor.py +1 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/base.py +3 -1
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/parse_engine.py +3 -71
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/utility.py +5 -2
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/LICENSE.md +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/README.md +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pyproject.toml +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/setup.cfg +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/setup.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_metadata.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_show.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/constants.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/enums.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/union_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/union_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/common.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/scripts/trilogy.py +0 -0
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_models.py
RENAMED
@@ -147,12 +147,14 @@ def test_select(test_environment: Environment):
     pid = test_environment.concepts["product_id"]
     cid = test_environment.concepts["category_id"]
     cname = test_environment.concepts["category_name"]
-    x = SelectStatement(
+    x = SelectStatement(
+        selection=[oid, pid, cid, cname], grain=Grain(components=[oid, pid, cid])
+    )
     ds = x.to_datasource(
         test_environment.namespace, "test", address=Address(location="test")
     )

-    assert ds.grain == Grain(components=[oid, pid, cid])
+    assert ds.grain.components == Grain(components=[oid, pid, cid]).components


 def test_undefined(test_environment: Environment):
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/tests/test_select.py
RENAMED
@@ -1,5 +1,5 @@
 # from trilogy.compiler import compile
-from trilogy import Dialects
+from trilogy import Dialects, Environment
 from trilogy.core.models import Grain, SelectStatement
 from trilogy.core.query_processor import process_query
 from trilogy.dialect.bigquery import BigqueryDialect
@@ -176,3 +176,35 @@ select id + 2 as three;

     result = Dialects.DUCK_DB.default_executor(environment=env).execute_text(q1)[-1]
     assert result.fetchone().three == 3
+
+
+def test_select_from_components():
+    env = Environment()
+    q1 = """
+
+key id int;
+property id.class int;
+property id.name string;
+
+select
+    class,
+    upper(id.name)-> upper_name,
+    count(id) ->class_id_count,
+;
+"""
+    env, statements = env.parse(q1)
+
+    select: SelectStatement = statements[-1]
+
+    assert select.grain.components == {"local.class", "local.upper_name"}
+    assert select.local_concepts["local.class_id_count"].grain.components == {
+        "local.class",
+        "local.upper_name",
+    }
+
+    # SelectStatement.from_inputs(
+    #     environment=env,
+    #     selection=[SelectItem(concept=env.concepts["id"]),
+    #                SelectItem(concept=env.concepts["id.class"])],
+    #     input_components=[],
+    # )
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/environment_helpers.py
RENAMED
@@ -11,12 +11,23 @@ from trilogy.core.models import (
 )
 from trilogy.parsing.common import Meta, arg_to_datatype, process_function_args

+FUNCTION_DESCRIPTION_MAPS = {
+    FunctionType.DATE: "The date part of a timestamp/date. Integer, 0-31 depending on month.",
+    FunctionType.MONTH: "The month part of a timestamp/date. Integer, 1-12.",
+    FunctionType.YEAR: "The year part of a timestamp/date. Integer.",
+    FunctionType.QUARTER: "The quarter part of a timestamp/date. Integer, 1-4.",
+    FunctionType.DAY_OF_WEEK: "The day of the week part of a timestamp/date. Integer, 0-6.",
+    FunctionType.HOUR: "The hour part of a timestamp. Integer, 0-23.",
+    FunctionType.MINUTE: "The minute part of a timestamp. Integer, 0-59.",
+    FunctionType.SECOND: "The second part of a timestamp. Integer, 0-59.",
+}
+

 def generate_date_concepts(concept: Concept, environment: Environment):
     if concept.metadata and concept.metadata.description:
         base_description = concept.metadata.description
     else:
-        base_description = f"a {concept.
+        base_description = f"a {concept.address}"
     if concept.metadata and concept.metadata.line_number:
         base_line_number = concept.metadata.line_number
     else:
@@ -54,7 +65,7 @@ def generate_date_concepts(concept: Concept, environment: Environment):
                 concept.address,
             ),
             metadata=Metadata(
-                description=f"Auto-derived
+                description=f"Auto-derived from {base_description}. {FUNCTION_DESCRIPTION_MAPS.get(ftype, ftype.value)}. ",
                 line_number=base_line_number,
                 concept_source=ConceptSource.AUTO_DERIVED,
             ),
@@ -68,7 +79,7 @@ def generate_datetime_concepts(concept: Concept, environment: Environment):
     if concept.metadata and concept.metadata.description:
         base_description = concept.metadata.description
     else:
-        base_description =
+        base_description = concept.address
     if concept.metadata and concept.metadata.line_number:
         base_line_number = concept.metadata.line_number
     else:
@@ -105,7 +116,7 @@ def generate_datetime_concepts(concept: Concept, environment: Environment):
                 concept.address,
             ),
             metadata=Metadata(
-                description=f"Auto-derived
+                description=f"Auto-derived from {base_description}. {FUNCTION_DESCRIPTION_MAPS.get(ftype, ftype.value)}.",
                 line_number=base_line_number,
                 concept_source=ConceptSource.AUTO_DERIVED,
             ),
@@ -147,7 +158,7 @@ def generate_key_concepts(concept: Concept, environment: Environment):
                 concept.address,
             },
             metadata=Metadata(
-                description=f"Auto-derived
+                description=f"Auto-derived integer. The {ftype.value} of {concept.address}, {base_description}",
                 line_number=base_line_number,
                 concept_source=ConceptSource.AUTO_DERIVED,
             ),
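For a quick sense of the strings the new FUNCTION_DESCRIPTION_MAPS entries produce, here is a purely illustrative evaluation of the f-string added to generate_date_concepts; the concept address order.created_at is an invented example and is not part of this release.

```python
# Illustration only: reproduces the f-string added in generate_date_concepts
# with hand-picked inputs. "order.created_at" is an invented concept address.
base_description = "order.created_at"
# value of FUNCTION_DESCRIPTION_MAPS[FunctionType.MONTH] from the diff above
month_description = "The month part of a timestamp/date. Integer, 1-12."

derived = f"Auto-derived from {base_description}. {month_description}. "
print(derived)
# Auto-derived from order.created_at. The month part of a timestamp/date. Integer, 1-12.. 
```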
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/functions.py
RENAMED
@@ -127,10 +127,11 @@ def Unnest(args: list[Concept]) -> Function:

 def Group(args: list[Concept]) -> Function:
     output = args[0]
+    datatype = arg_to_datatype(output)
     return Function(
         operator=FunctionType.GROUP,
         arguments=args,
-        output_datatype=
+        output_datatype=datatype,
         output_purpose=Purpose.PROPERTY,
         arg_count=-1,
     )
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/models.py
RENAMED
@@ -620,7 +620,7 @@ class Concept(Mergeable, Namespaced, SelectContext, BaseModel):
         )
         final_grain = self.grain or grain
         keys = self.keys if self.keys else None
-        if self.is_aggregate and isinstance(new_lineage, Function):
+        if self.is_aggregate and isinstance(new_lineage, Function) and grain.components:
             grain_components = [environment.concepts[c] for c in grain.components]
             new_lineage = AggregateWrapper(function=new_lineage, by=grain_components)
             final_grain = grain
@@ -923,9 +923,16 @@ class Grain(Namespaced, BaseModel):
         if not self.where_clause:
             where = other.where_clause
         elif not other.where_clause == self.where_clause:
-            raise NotImplementedError(
-                f"Cannot merge grains with where clauses, self {self.where_clause} other {other.where_clause}"
+            where = WhereClause(
+                conditional=Conditional(
+                    left=self.where_clause.conditional,
+                    right=other.where_clause.conditional,
+                    operator=BooleanOperator.AND,
+                )
             )
+            # raise NotImplementedError(
+            #     f"Cannot merge grains with where clauses, self {self.where_clause} other {other.where_clause}"
+            # )
         return Grain(
             components=self.components.union(other.components), where_clause=where
         )
@@ -1015,6 +1022,7 @@ class EnvironmentConceptDict(dict):
     def raise_undefined(
         self, key: str, line_no: int | None = None, file: Path | str | None = None
     ) -> Never:
+
         matches = self._find_similar_concepts(key)
         message = f"Undefined concept: {key}."
         if matches:
@@ -1660,6 +1668,96 @@ class SelectStatement(HasUUID, Mergeable, Namespaced, SelectTypeMixin, BaseModel
     ] = Field(default_factory=EnvironmentConceptDict)
     grain: Grain = Field(default_factory=Grain)

+    @classmethod
+    def from_inputs(
+        cls,
+        environment: Environment,
+        selection: List[SelectItem],
+        order_by: OrderBy | None = None,
+        limit: int | None = None,
+        meta: Metadata | None = None,
+        where_clause: WhereClause | None = None,
+        having_clause: HavingClause | None = None,
+    ) -> "SelectStatement":
+
+        output = SelectStatement(
+            selection=selection,
+            where_clause=where_clause,
+            having_clause=having_clause,
+            limit=limit,
+            order_by=order_by,
+            meta=meta or Metadata(),
+        )
+        for parse_pass in [
+            1,
+            2,
+        ]:
+            # the first pass will result in all concepts being defined
+            # the second will get grains appropriately
+            # eg if someone does sum(x)->a, b+c -> z - we don't know if Z is a key to group by or an aggregate
+            # until after the first pass, and so don't know the grain of a
+
+            if parse_pass == 1:
+                grain = Grain.from_concepts(
+                    [
+                        x.content
+                        for x in output.selection
+                        if isinstance(x.content, Concept)
+                    ],
+                    where_clause=output.where_clause,
+                )
+            if parse_pass == 2:
+                grain = Grain.from_concepts(
+                    output.output_components, where_clause=output.where_clause
+                )
+            output.grain = grain
+            pass_grain = Grain() if parse_pass == 1 else grain
+            for item in selection:
+                # we don't know the grain of an aggregate at assignment time
+                # so rebuild at this point in the tree
+                # TODO: simplify
+                if isinstance(item.content, ConceptTransform):
+                    new_concept = item.content.output.with_select_context(
+                        output.local_concepts,
+                        # the first pass grain will be incorrect
+                        pass_grain,
+                        environment=environment,
+                    )
+                    output.local_concepts[new_concept.address] = new_concept
+                    item.content.output = new_concept
+                    if parse_pass == 2 and CONFIG.select_as_definition:
+                        environment.add_concept(new_concept)
+                elif isinstance(item.content, UndefinedConcept):
+                    environment.concepts.raise_undefined(
+                        item.content.address,
+                        line_no=item.content.metadata.line_number,
+                        file=environment.env_file_path,
+                    )
+                elif isinstance(item.content, Concept):
+                    # Sometimes cached values here don't have the latest info
+                    # but we can't just use environment, as it might not have the right grain.
+                    item.content = item.content.with_select_context(
+                        output.local_concepts,
+                        pass_grain,
+                        environment=environment,
+                    )
+                    output.local_concepts[item.content.address] = item.content
+
+        if order_by:
+            output.order_by = order_by.with_select_context(
+                local_concepts=output.local_concepts,
+                grain=output.grain,
+                environment=environment,
+            )
+        if output.having_clause:
+            output.having_clause = output.having_clause.with_select_context(
+                local_concepts=output.local_concepts,
+                grain=output.grain,
+                environment=environment,
+            )
+        output.validate_syntax(environment)
+        return output
+
     def validate_syntax(self, environment: Environment):
         if self.where_clause:
             for x in self.where_clause.concept_arguments:
@@ -2091,6 +2189,10 @@ class Datasource(HasUUID, Namespaced, BaseModel):
     def duplicate(self) -> Datasource:
         return self.model_copy(deep=True)

+    @property
+    def hidden_concepts(self) -> List[Concept]:
+        return []
+
     def merge_concept(
         self, source: Concept, target: Concept, modifiers: List[Modifier]
     ):
@@ -2163,17 +2265,7 @@ class Datasource(HasUUID, Namespaced, BaseModel):
     @field_validator("grain", mode="before")
     @classmethod
     def grain_enforcement(cls, v: Grain, info: ValidationInfo):
-        values = info.data
         grain: Grain = safe_grain(v)
-        if not grain.components:
-            columns: List[ColumnAssignment] = values.get("columns", [])
-            grain = Grain.from_concepts(
-                [
-                    c.concept.with_grain(Grain())
-                    for c in columns
-                    if c.concept.purpose == Purpose.KEY
-                ]
-            )
         return grain

     def add_column(
@@ -2982,12 +3074,18 @@ class CTE(BaseModel):
                 assert isinstance(c.lineage, RowsetItem)
                 return check_is_not_in_group(c.lineage.content)
             if c.derivation == PurposeLineage.CONSTANT:
-                return
+                return True
             if c.purpose == Purpose.METRIC:
                 return True
-
+
+            if c.derivation == PurposeLineage.BASIC and c.lineage:
                 if all([check_is_not_in_group(x) for x in c.lineage.concept_arguments]):
                     return True
+                if (
+                    isinstance(c.lineage, Function)
+                    and c.lineage.operator == FunctionType.GROUP
+                ):
+                    return check_is_not_in_group(c.lineage.concept_arguments[0])
             return False

         return (
@@ -3264,6 +3362,7 @@ class Environment(BaseModel):
     alias_origin_lookup: Dict[str, Concept] = Field(default_factory=dict)
     # TODO: support freezing environments to avoid mutation
     frozen: bool = False
+    env_file_path: Path | None = None

     def freeze(self):
         self.frozen = True
@@ -3317,7 +3416,7 @@
         path = Path(path)
         with open(path, "r") as f:
             read = f.read()
-        return Environment(working_path=
+        return Environment(working_path=path.parent, env_file_path=path).parse(read)[0]

     @classmethod
     def from_string(cls, input: str) -> "Environment":
@@ -3664,6 +3763,7 @@
         for k, v in self.concepts.items():
             if v.address == target.address:
                 v.pseudonyms.add(source.address)
+
             if v.address == source.address:
                 replacements[k] = target
                 v.pseudonyms.add(target.address)
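A minimal sketch of how the SelectStatement.from_inputs constructor added above might be called. It is untested against this release: the SelectItem import path and its concept= keyword are assumptions taken from the commented-out stub in tests/test_select.py, and the model script is invented for the example.

```python
# Hypothetical usage sketch, not from the package: exercises the signature of
# SelectStatement.from_inputs as added in trilogy/core/models.py above.
from trilogy import Environment
from trilogy.core.models import SelectItem, SelectStatement  # SelectItem path assumed

env = Environment()
env, _ = env.parse(
    """
key id int;
property id.class int;
"""
)

stmt = SelectStatement.from_inputs(
    environment=env,
    selection=[
        SelectItem(concept=env.concepts["id"]),        # keyword assumed from the test stub
        SelectItem(concept=env.concepts["id.class"]),
    ],
)
print(stmt.grain.components)
```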
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/concept_strategies_v3.py
RENAMED
@@ -539,11 +539,14 @@ def validate_concept(
     found_addresses.add(concept.address)
     found_map[str(node)].add(concept)
     for v_address in concept.pseudonyms:
+        if v_address in seen:
+            return
         v = environment.concepts[v_address]
-        if v
+        if v.address in seen:
             return
-        if v
+        if v.address == concept.address:
             return
+
         validate_concept(
             v,
             node,
@@ -866,6 +869,7 @@ def _search_concepts(
         )
         if complete == ValidationResult.INCOMPLETE_CONDITION:
             cond_dict = {str(node): node.preexisting_conditions for node in stack}
+            logger.error(f"Have {cond_dict} and need {str(conditions)}")
             raise SyntaxError(f"Have {cond_dict} and need {str(conditions)}")
     # early exit if we have a complete stack with one node
     # we can only early exit if we have a complete stack
@@ -987,17 +991,20 @@ def source_query_concepts(
         raise ValueError(
             f"Could not resolve conections between {error_strings} from environment graph."
         )
-
-
-
-            ],
-            input_concepts=[
-                x for x in root.output_concepts if x.address not in root.hidden_concepts
-            ],
+    final = [x for x in root.output_concepts if x.address not in root.hidden_concepts]
+    if GroupNode.check_if_required(
+        downstream_concepts=final,
+        parents=[root.resolve()],
         environment=environment,
-
-
-
-
-
+    ).required:
+        candidate: StrategyNode = GroupNode(
+            output_concepts=final,
+            input_concepts=final,
+            environment=environment,
+            parents=[root],
+            partial_concepts=root.partial_concepts,
+        )
+    else:
+        candidate = root
+
     return candidate
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/basic_node.py
RENAMED
@@ -13,6 +13,7 @@ from trilogy.core.processing.node_generators.common import (
     resolve_function_parent_concepts,
 )
 from trilogy.core.processing.nodes import History, StrategyNode
+from trilogy.utility import unique

 LOGGER_PREFIX = "[GEN_BASIC_NODE]"

@@ -65,7 +66,9 @@ def gen_basic_node(
     non_equivalent_optional = [
         x for x in local_optional if x not in equivalent_optional
     ]
-    all_parents =
+    all_parents: list[Concept] = unique(
+        parent_concepts + non_equivalent_optional, "address"
+    )
     logger.info(
         f"{depth_prefix}{LOGGER_PREFIX} Fetching parents {[x.address for x in all_parents]}"
     )
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/filter_node.py
RENAMED
@@ -60,7 +60,7 @@ def gen_filter_node(
         g=g,
         depth=depth + 1,
         history=history,
-
+        conditions=conditions,
     )

     flattened_existence = [x for y in parent_existence_concepts for x in y]
@@ -194,6 +194,9 @@ def gen_filter_node(
         history=history,
         conditions=conditions,
     )
+    logger.info(
+        f"{padding(depth)}{LOGGER_PREFIX} returning filter node and enrich node with {enrich_node.output_concepts} and {enrich_node.input_concepts}"
+    )
     return MergeNode(
         input_concepts=[concept, immediate_parent] + local_optional,
         output_concepts=[
@@ -206,4 +209,5 @@ def gen_filter_node(
             filter_node,
             enrich_node,
         ],
+        preexisting_conditions=conditions.conditional if conditions else None,
     )
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/group_to_node.py
RENAMED
@@ -26,6 +26,7 @@ def gen_group_to_node(
     # aggregates MUST always group to the proper grain
     if not isinstance(concept.lineage, Function):
         raise SyntaxError("Group to should have function lineage")
+    group_arg = concept.lineage.arguments[0]
     parent_concepts: List[Concept] = concept.lineage.concept_arguments
     logger.info(
         f"{padding(depth)}{LOGGER_PREFIX} group by node has required parents {[x.address for x in parent_concepts]}"
@@ -47,6 +48,13 @@
         environment=environment,
         parents=parents,
         depth=depth,
+        preexisting_conditions=conditions.conditional if conditions else None,
+        hidden_concepts=(
+            [group_arg]
+            if isinstance(group_arg, Concept)
+            and group_arg.address not in local_optional
+            else []
+        ),
     )

     # early exit if no optional
@@ -62,6 +70,7 @@
         g=g,
         depth=depth + 1,
         history=history,
+        conditions=conditions,
     )
     if not enrich_node:
         logger.info(
@@ -83,4 +92,5 @@
         ],
         whole_grain=True,
         depth=depth,
+        preexisting_conditions=conditions.conditional if conditions else None,
     )
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/multiselect_node.py
RENAMED
@@ -69,6 +69,7 @@ def gen_multiselect_node(
     lineage: MultiSelectStatement = concept.lineage

     base_parents: List[StrategyNode] = []
+    partial = []
     for select in lineage.selects:
         snode: StrategyNode = source_concepts(
             mandatory_list=select.output_components,
@@ -103,6 +104,9 @@
         for mc in merge_concepts:
             assert mc in snode.resolve().output_concepts
         base_parents.append(snode)
+        if select.where_clause:
+            for item in select.output_components:
+                partial.append(item)

     node_joins = extra_align_joins(lineage, base_parents)
     node = MergeNode(
@@ -112,35 +116,28 @@
         depth=depth,
         parents=base_parents,
         node_joins=node_joins,
+        hidden_concepts=[x for y in base_parents for x in y.hidden_concepts],
     )

     enrichment = set([x.address for x in local_optional])

-
+    multiselect_relevant = [
         x
         for x in lineage.derived_concepts
         if x.address == concept.address or x.address in enrichment
     ]
-    additional_relevant = [
-        x for x in select.output_components if x.address in enrichment
-    ]
+    additional_relevant = [x for x in node.output_concepts if x.address in enrichment]
     # add in other other concepts
-    for item in rowset_relevant:
-        node.output_concepts.append(item)
-    for item in additional_relevant:
-        node.output_concepts.append(item)
-    if select.where_clause:
-        for item in additional_relevant:
-            node.partial_concepts.append(item)

-
-    node.resolution_cache = node._resolve()
+    node.set_output_concepts(multiselect_relevant + additional_relevant)

-    #
-    #
-
-
-    )
+    # node.add_partial_concepts(partial)
+    # if select.where_clause:
+    #     for item in additional_relevant:
+    #         node.partial_concepts.append(item)
+    node.grain = Grain.from_concepts(node.output_concepts, environment=environment)
+    node.rebuild_cache()
+    # we need a better API for refreshing a nodes QDS
     possible_joins = concept_to_relevant_joins(additional_relevant)
     if not local_optional:
         logger.info(
@@ -159,6 +156,7 @@ def gen_multiselect_node(
             f"{padding(depth)}{LOGGER_PREFIX} all enriched concepts returned from base rowset node; exiting early"
         )
         return node
+
     enrich_node: MergeNode = source_concepts(  # this fetches the parent + join keys
         # to then connect to the rest of the query
         mandatory_list=additional_relevant + local_optional,
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/node_generators/select_merge_node.py
RENAMED
@@ -240,10 +240,12 @@
     depth: int,
     conditions: WhereClause | None = None,
 ) -> tuple[StrategyNode, bool]:
-    target_grain = Grain.from_concepts(all_concepts)
+    target_grain = Grain.from_concepts(all_concepts, environment=environment)
     force_group = False
     if not datasource.grain.issubset(target_grain):
         force_group = True
+    if not datasource.grain.components:
+        force_group = True
     partial_concepts = [
         c.concept
         for c in datasource.columns
@@ -350,6 +352,9 @@

     # we need to nest the group node one further
     if force_group is True:
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} source requires group before consumption."
+        )
         candidate: StrategyNode = GroupNode(
             output_concepts=all_concepts,
             input_concepts=all_concepts,
@@ -359,8 +364,10 @@
             partial_concepts=bcandidate.partial_concepts,
             nullable_concepts=bcandidate.nullable_concepts,
             preexisting_conditions=bcandidate.preexisting_conditions,
+            force_group=force_group,
         )
     else:
+
         candidate = bcandidate
     return candidate

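To condense the force_group behaviour changed above (a datasource whose grain has no components now always triggers a group), here is a standalone illustration of the boolean logic only; the helper below is invented for the example and is not pytrilogy code.

```python
# Invented helper mirroring only the force_group decision shown in the diff above.
def needs_group(datasource_grain_components: set[str], grain_is_subset: bool) -> bool:
    force_group = False
    if not grain_is_subset:
        force_group = True
    if not datasource_grain_components:  # new in 0.0.2.57: empty grain always groups
        force_group = True
    return force_group


assert needs_group(set(), grain_is_subset=True) is True       # empty grain -> group
assert needs_group({"order.id"}, grain_is_subset=True) is False
assert needs_group({"order.id"}, grain_is_subset=False) is True
```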
{pytrilogy-0.0.2.55 → pytrilogy-0.0.2.57}/trilogy/core/processing/nodes/base_node.py
RENAMED
@@ -192,6 +192,8 @@ class StrategyNode:
         for x in self.parents:
             for z in x.usable_outputs:
                 non_hidden.add(z.address)
+                for psd in z.pseudonyms:
+                    non_hidden.add(psd)
         if not all([x.address in non_hidden for x in self.input_concepts]):
             missing = [x for x in self.input_concepts if x.address not in non_hidden]
             raise ValueError(
@@ -210,6 +212,8 @@
         return self

     def add_condition(self, condition: Conditional | Comparison | Parenthetical):
+        if self.conditions and condition == self.conditions:
+            return self
         if self.conditions:
             self.conditions = Conditional(
                 left=self.conditions, right=condition, operator=BooleanOperator.AND
@@ -244,6 +248,15 @@
         self.rebuild_cache()
         return self

+    def add_partial_concepts(self, concepts: List[Concept], rebuild: bool = True):
+        for concept in concepts:
+            if concept.address not in self.partial_lcl.addresses:
+                self.partial_concepts.append(concept)
+        self.partial_lcl = LooseConceptList(concepts=self.partial_concepts)
+        if rebuild:
+            self.rebuild_cache()
+        return self
+
     def add_existence_concepts(self, concepts: List[Concept], rebuild: bool = True):
         for concept in concepts:
             if concept.address not in self.output_concepts: