pytrilogy 0.0.2.48__tar.gz → 0.0.2.50__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pytrilogy has been flagged as potentially problematic.
- {pytrilogy-0.0.2.48/pytrilogy.egg-info → pytrilogy-0.0.2.50}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_discovery_nodes.py +0 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_functions.py +32 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_query_processing.py +2 -6
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/enums.py +11 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/functions.py +4 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/models.py +29 -14
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/concept_strategies_v3.py +3 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/common.py +0 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/filter_node.py +0 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/group_node.py +0 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/group_to_node.py +0 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/multiselect_node.py +0 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/node_merge_node.py +0 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/rowset_node.py +27 -8
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/select_merge_node.py +138 -59
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/union_node.py +0 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/unnest_node.py +0 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/window_node.py +0 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/base_node.py +28 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/filter_node.py +0 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/group_node.py +9 -6
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/merge_node.py +3 -4
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/select_node_v2.py +5 -4
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/union_node.py +0 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/unnest_node.py +0 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/window_node.py +0 -3
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/utility.py +4 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/query_processor.py +3 -8
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/base.py +14 -2
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/duckdb.py +7 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/hooks/graph_hook.py +14 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/common.py +14 -5
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/parse_engine.py +32 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/trilogy.lark +3 -1
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/LICENSE.md +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/README.md +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pyproject.toml +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/setup.cfg +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/setup.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_enums.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_metadata.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_models.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_parse_engine.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_select.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_show.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/constants.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/utility.py +0 -0
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_discovery_nodes.py RENAMED

@@ -15,7 +15,6 @@ def test_group_node(test_environment, test_environment_graph):
         output_concepts=[total_revenue, category],
         input_concepts=[category, revenue],
         environment=test_environment,
-        g=test_environment_graph,
         parents=[
             search_concepts(
                 [category, revenue],
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_functions.py RENAMED

@@ -14,6 +14,8 @@ from trilogy.dialect.duckdb import DuckDBDialect
 from trilogy.dialect.snowflake import SnowflakeDialect
 from trilogy.dialect.sql_server import SqlServerDialect
 from trilogy.parser import parse
+from trilogy import Dialects
+from datetime import date, datetime
 
 logger.setLevel(INFO)
 
@@ -154,6 +156,36 @@ def test_explicit_cast(test_environment):
         dialect.compile_statement(process_query(test_environment, select))
 
 
+def test_literal_cast(test_environment):
+    declarations = """
+    select
+        '1'::int -> one,
+        '1'::float -> one_float,
+        '1'::string -> one_string,
+        '2024-01-01'::date -> one_date,
+        '2024-01-01 01:01:01'::datetime -> one_datetime,
+        'true'::bool -> one_bool,
+    ;"""
+    env, parsed = parse(declarations, environment=test_environment)
+
+    select: SelectStatement = parsed[-1]
+    z = (
+        Dialects.DUCK_DB.default_executor(environment=test_environment)
+        .execute_query(parsed[-1])
+        .fetchall()
+    )
+    assert z[0].one == 1
+    assert z[0].one_float == 1.0
+    assert z[0].one_string == "1"
+    assert z[0].one_date == date(year=2024, month=1, day=1)
+    assert z[0].one_datetime == datetime(
+        year=2024, month=1, day=1, hour=1, minute=1, second=1
+    )
+    assert z[0].one_bool == True
+    for dialect in TEST_DIALECTS:
+        dialect.compile_statement(process_query(test_environment, select))
+
+
 def test_math_functions(test_environment):
     declarations = """
 
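The new `test_literal_cast` covers the `'<value>'::<type> -> alias` literal-cast syntax added in this release. Below is a minimal standalone sketch of the same flow outside the test fixtures; that `parse` creates a default environment when none is passed is an assumption, not something shown in this diff.

```python
from trilogy import Dialects
from trilogy.parser import parse

# Sketch only: mirrors test_literal_cast without the pytest fixtures.
# Assumption: parse() builds a default Environment when none is supplied.
env, statements = parse("select '1'::int -> one, '2024-01-01'::date -> one_date;")

# Execute the parsed select on the default in-memory DuckDB executor.
rows = (
    Dialects.DUCK_DB.default_executor(environment=env)
    .execute_query(statements[-1])
    .fetchall()
)
assert rows[0].one == 1
```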
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/tests/test_query_processing.py RENAMED

@@ -130,9 +130,7 @@ def test_join_aggregate(test_environment: Environment, test_environment_graph):
 
 
 def test_query_aggregation(test_environment, test_environment_graph):
     select = SelectStatement(selection=[test_environment.concepts["total_revenue"]])
-    datasource = get_query_datasources(
-        environment=test_environment, graph=test_environment_graph, statement=select
-    )
+    datasource = get_query_datasources(environment=test_environment, statement=select)
 
     assert {datasource.identifier} == {"revenue_at_local_order_id_at_abstract"}
     check = datasource
@@ -150,9 +148,7 @@ def test_query_datasources(test_environment, test_environment_graph):
             test_environment.concepts["total_revenue"],
         ]
     )
-    get_query_datasources(
-        environment=test_environment, graph=test_environment_graph, statement=select
-    )
+    get_query_datasources(environment=test_environment, statement=select)
 
 
 def test_full_query(test_environment, test_environment_graph):
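Both hunks reflect the narrowed `get_query_datasources` signature: the `graph` keyword is gone, and the processor now works from the environment and statement alone. A minimal sketch of the updated calling convention; the import paths are assumptions and are not shown in this diff.

```python
from trilogy.core.models import Environment, SelectStatement  # assumed import path
from trilogy.core.query_processor import get_query_datasources  # assumed import path


def resolve_datasource(environment: Environment, statement: SelectStatement):
    # 0.0.2.48 also required graph=...; 0.0.2.50 derives what it needs
    # from the environment, so only two keyword arguments remain.
    return get_query_datasources(environment=environment, statement=statement)
```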
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/enums.py RENAMED

@@ -120,6 +120,8 @@ class FunctionType(Enum):
 
     ALIAS = "alias"
 
+    PARENTHETICAL = "parenthetical"
+
     # Generic
     CASE = "case"
     CAST = "cast"
@@ -135,6 +137,8 @@ class FunctionType(Enum):
     ATTR_ACCESS = "attr_access"
     STRUCT = "struct"
     ARRAY = "array"
+    DATE_LITERAL = "date_literal"
+    DATETIME_LITERAL = "datetime_literal"
 
     # TEXT AND MAYBE MORE
     SPLIT = "split"
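The enum gains three members: `PARENTHETICAL`, plus the two literal markers backing the new date/datetime literal handling. A quick sanity check of the values, taken verbatim from the hunks above:

```python
from trilogy.core.enums import FunctionType

# New FunctionType members added in 0.0.2.50.
assert FunctionType.PARENTHETICAL.value == "parenthetical"
assert FunctionType.DATE_LITERAL.value == "date_literal"
assert FunctionType.DATETIME_LITERAL.value == "datetime_literal"
```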
@@ -260,6 +264,13 @@ class ComparisonOperator(Enum):
     CONTAINS = "contains"
     ELSE = "else"
 
+    def __eq__(self, other):
+        if isinstance(other, str):
+            return self.value == other
+        if not isinstance(other, ComparisonOperator):
+            return False
+        return self.value == other.value
+
     @classmethod
     def _missing_(cls, value):
         if not isinstance(value, list) and " " in str(value):
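The new `__eq__` lets a `ComparisonOperator` member compare equal to its raw string value, which simplifies operator checks during parsing and rendering. A short illustration using only members visible in the hunk above:

```python
from trilogy.core.enums import ComparisonOperator

# A member now compares equal to its own string value...
assert ComparisonOperator.CONTAINS == "contains"
# ...but not to other strings or to unrelated objects.
assert ComparisonOperator.CONTAINS != "else"
assert ComparisonOperator.CONTAINS != 42
```

One thing worth verifying downstream: overriding `__eq__` on an `Enum` normally sets `__hash__` to `None` unless `__hash__` is also defined, and no matching `__hash__` appears in this hunk.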
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/functions.py RENAMED

@@ -1,3 +1,4 @@
+from datetime import date, datetime
 from typing import Optional
 
 from trilogy.constants import MagicConstants
@@ -17,6 +18,8 @@ from trilogy.core.models import (
     arg_to_datatype,
 )
 
+GENERIC_ARGS = Concept | Function | str | int | float | date | datetime
+
 
 def create_function_derived_concept(
     name: str,
@@ -262,7 +265,7 @@ def get_attr_datatype(
         return arg.datatype
 
 
-def AttrAccess(args: list[
+def AttrAccess(args: list[GENERIC_ARGS]):
     return Function(
         operator=FunctionType.ATTR_ACCESS,
         arguments=args,
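`GENERIC_ARGS` widens the accepted argument types for helpers like `AttrAccess` to include bare Python literals, now with `date` and `datetime`. A small sketch confirming the alias behaves as an ordinary PEP 604 union (Python 3.10+); nothing here beyond the alias itself is taken from the diff.

```python
from datetime import date, datetime

from trilogy.core.functions import GENERIC_ARGS

# The alias is a plain union, so it works for isinstance checks as well as
# annotations; date and datetime are the members added in this release.
assert isinstance(date(2024, 1, 1), GENERIC_ARGS)
assert isinstance(datetime(2024, 1, 1, 12, 0, 0), GENERIC_ARGS)
assert isinstance("customer.name", GENERIC_ARGS)
```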
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/models.py RENAMED

@@ -5,6 +5,7 @@ import hashlib
 import os
 from abc import ABC
 from collections import UserDict, UserList, defaultdict
+from datetime import date, datetime
 from enum import Enum
 from functools import cached_property
 from pathlib import Path
@@ -467,7 +468,7 @@ class Concept(Mergeable, Namespaced, SelectContext, BaseModel):
         )
 
     def __repr__(self):
-        base = f"{self.
+        base = f"{self.address}@{self.grain}"
         return base
 
     @property
@@ -662,11 +663,24 @@ class Concept(Mergeable, Namespaced, SelectContext, BaseModel):
             local_concepts=local_concepts, grain=grain, environment=environment
         )
         final_grain = self.grain
-
+        keys = (
+            tuple(
+                [
+                    x.with_select_context(local_concepts, grain, environment)
+                    for x in self.keys
+                ]
+            )
+            if self.keys
+            else None
+        )
         if self.is_aggregate and isinstance(new_lineage, Function):
             new_lineage = AggregateWrapper(function=new_lineage, by=grain.components)
             final_grain = grain
-
+            keys = tuple(grain.components)
+        elif (
+            self.is_aggregate and not keys and isinstance(new_lineage, AggregateWrapper)
+        ):
+            keys = tuple(new_lineage.by)
         return self.__class__(
             name=self.name,
             datatype=self.datatype,
@@ -675,16 +689,7 @@ class Concept(Mergeable, Namespaced, SelectContext, BaseModel):
             lineage=new_lineage,
             grain=final_grain,
             namespace=self.namespace,
-            keys=
-            tuple(
-                [
-                    x.with_select_context(local_concepts, grain, environment)
-                    for x in self.keys
-                ]
-            )
-            if self.keys
-            else None
-            ),
+            keys=keys,
             modifiers=self.modifiers,
             # a select needs to always defer to the environment for pseudonyms
             # TODO: evaluate if this should be cached
@@ -1260,6 +1265,8 @@ class Function(Mergeable, Namespaced, SelectContext, BaseModel):
             int,
             float,
             str,
+            date,
+            datetime,
             MapWrapper[Any, Any],
             DataType,
             ListType,
@@ -2626,7 +2633,7 @@ class QueryDatasource(BaseModel):
             and CONFIG.validate_missing
         ):
             raise SyntaxError(
-                f"
+                f"On query datasource missing source map for {concept.address} on {key}, have {v}"
             )
         return v
 
@@ -3864,6 +3871,8 @@ class Comparison(
         float,
         list,
         bool,
+        datetime,
+        date,
         Function,
         Concept,
         "Conditional",
@@ -3880,6 +3889,8 @@ class Comparison(
         float,
         list,
         bool,
+        date,
+        datetime,
         Concept,
         Function,
         "Conditional",
@@ -5004,5 +5015,9 @@ def arg_to_datatype(arg) -> DataType | ListType | StructType | MapType | Numeric
         return ListType(type=wrapper.type)
     elif isinstance(arg, MapWrapper):
         return MapType(key_type=arg.key_type, value_type=arg.value_type)
+    elif isinstance(arg, datetime):
+        return DataType.DATETIME
+    elif isinstance(arg, date):
+        return DataType.DATE
     else:
         raise ValueError(f"Cannot parse arg datatype for arg of raw type {type(arg)}")
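With the two new branches, `arg_to_datatype` maps Python `datetime`/`date` literals to the corresponding `DataType` members; the `datetime` check must come first because `datetime` is a subclass of `date`. A small usage sketch; the `DataType` import location is an assumption.

```python
from datetime import date, datetime

from trilogy.core.enums import DataType  # assumed location of the DataType enum
from trilogy.core.models import arg_to_datatype

# datetime is matched before date, since every datetime is also a date.
assert arg_to_datatype(datetime(2024, 1, 1, 1, 1, 1)) == DataType.DATETIME
assert arg_to_datatype(date(2024, 1, 1)) == DataType.DATE
```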
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/concept_strategies_v3.py RENAMED

@@ -359,7 +359,6 @@ def generate_node(
         input_concepts=[],
         output_concepts=constant_targets,
         environment=environment,
-        g=g,
         parents=[],
         depth=depth + 1,
     )
@@ -569,6 +568,8 @@ def validate_stack(
         resolved = node.resolve()
 
         for concept in resolved.output_concepts:
+            if concept in resolved.hidden_concepts:
+                continue
             validate_concept(
                 concept,
                 node,
@@ -836,6 +837,7 @@ def _search_concepts(
             PurposeLineage.ROWSET,
             PurposeLineage.BASIC,
             PurposeLineage.MULTISELECT,
+            PurposeLineage.UNION,
         ]:
             skip.add(priority_concept.address)
             break
@@ -903,7 +905,6 @@ def _search_concepts(
             input_concepts=non_virtual,
             output_concepts=non_virtual,
             environment=environment,
-            g=g,
             parents=stack,
             depth=depth,
         )
@@ -984,7 +985,6 @@ def source_query_concepts(
             x for x in root.output_concepts if x.address not in root.hidden_concepts
         ],
         environment=environment,
-        g=g,
         parents=[root],
         partial_concepts=root.partial_concepts,
     )
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/common.py RENAMED

@@ -130,7 +130,6 @@ def gen_property_enrichment_node(
         ),
         output_concepts=base_node.output_concepts + extra_properties,
         environment=environment,
-        g=g,
         parents=[
             base_node,
         ]
@@ -209,7 +208,6 @@ def gen_enrichment_node(
         input_concepts=unique(join_keys + extra_required + non_hidden, "address"),
         output_concepts=unique(join_keys + extra_required + non_hidden, "address"),
         environment=environment,
-        g=g,
         parents=[enrich_node, base_node],
         force_group=False,
         preexisting_conditions=conditions.conditional if conditions else None,
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/filter_node.py RENAMED

@@ -117,7 +117,6 @@ def gen_filter_node(
         input_concepts=row_parent.output_concepts,
         output_concepts=[concept] + row_parent.output_concepts,
         environment=row_parent.environment,
-        g=row_parent.g,
         parents=[row_parent],
         depth=row_parent.depth,
         partial_concepts=row_parent.partial_concepts,
@@ -161,7 +160,6 @@ def gen_filter_node(
         ),
         output_concepts=[concept, immediate_parent] + parent_row_concepts,
         environment=environment,
-        g=g,
         parents=core_parents,
         grain=Grain(
             components=[immediate_parent] + parent_row_concepts,
@@ -202,7 +200,6 @@ def gen_filter_node(
         ]
         + local_optional,
         environment=environment,
-        g=g,
         parents=[
             # this node fetches only what we need to filter
             filter_node,
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/group_to_node.py RENAMED

@@ -45,7 +45,6 @@ def gen_group_to_node(
         output_concepts=parent_concepts + [concept],
         input_concepts=parent_concepts,
         environment=environment,
-        g=g,
         parents=parents,
         depth=depth,
     )
@@ -76,7 +75,6 @@ def gen_group_to_node(
         + [x for x in parent_concepts if x.address != concept.address],
         output_concepts=[concept] + local_optional,
         environment=environment,
-        g=g,
         parents=[
             # this node gets the group
             group_node,
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/multiselect_node.py RENAMED

@@ -108,7 +108,6 @@ def gen_multiselect_node(
         input_concepts=[x for y in base_parents for x in y.output_concepts],
         output_concepts=[x for y in base_parents for x in y.output_concepts],
         environment=environment,
-        g=g,
         depth=depth,
         parents=base_parents,
         node_joins=node_joins,
@@ -178,7 +177,6 @@ def gen_multiselect_node(
         input_concepts=enrich_node.output_concepts + node.output_concepts,
         output_concepts=node.output_concepts + local_optional,
         environment=environment,
-        g=g,
         depth=depth,
         parents=[
             # this node gets the multiselect
{pytrilogy-0.0.2.48 → pytrilogy-0.0.2.50}/trilogy/core/processing/node_generators/rowset_node.py RENAMED

@@ -38,7 +38,7 @@ def gen_rowset_node(
     rowset: RowsetDerivationStatement = lineage.rowset
     select: SelectStatement | MultiSelectStatement = lineage.rowset.select
 
-    node = get_query_node(environment, select
+    node = get_query_node(environment, select)
 
     if not node:
         logger.info(
@@ -94,15 +94,22 @@ def gen_rowset_node(
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} no enrichment required for rowset node as all optional found or no optional; exiting early."
         )
-        # node.set_preexisting_conditions(conditions.conditional if conditions else None)
         return node
-
-
+    possible_joins = concept_to_relevant_joins(
+        [x for x in node.output_concepts if x.derivation != PurposeLineage.ROWSET]
+    )
+    logger.info({x.address: x.keys for x in possible_joins})
     if not possible_joins:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} no possible joins for rowset node to get {[x.address for x in local_optional]}; have {[x.address for x in node.output_concepts]}"
         )
         return node
+    if any(x.derivation == PurposeLineage.ROWSET for x in possible_joins):
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} cannot enrich rowset node with rowset concepts; exiting early"
+        )
+        return node
+    logger.info([x.address for x in possible_joins + local_optional])
     enrich_node: MergeNode = source_concepts(  # this fetches the parent + join keys
         # to then connect to the rest of the query
         mandatory_list=possible_joins + local_optional,
@@ -110,22 +117,34 @@ def gen_rowset_node(
         g=g,
         depth=depth + 1,
         conditions=conditions,
+        history=history,
     )
     if not enrich_node:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} Cannot generate rowset enrichment node for {concept} with optional {local_optional}, returning just rowset node"
         )
         return node
+
+    non_hidden = [
+        x for x in node.output_concepts if x.address not in node.hidden_concepts
+    ]
+    for x in possible_joins:
+        if x.address in node.hidden_concepts:
+            node.unhide_output_concepts([x])
+    non_hidden_enrich = [
+        x
+        for x in enrich_node.output_concepts
+        if x.address not in enrich_node.hidden_concepts
+    ]
     return MergeNode(
-        input_concepts=
-        output_concepts=
+        input_concepts=non_hidden + non_hidden_enrich,
+        output_concepts=non_hidden + local_optional,
         environment=environment,
-        g=g,
         depth=depth,
         parents=[
             node,
             enrich_node,
         ],
-        partial_concepts=node.partial_concepts,
+        partial_concepts=node.partial_concepts + enrich_node.partial_concepts,
         preexisting_conditions=conditions.conditional if conditions else None,
     )