pytrilogy 0.0.2.32__tar.gz → 0.0.2.34__tar.gz
This diff compares publicly available package versions as they were released to their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of pytrilogy might be problematic.
- {pytrilogy-0.0.2.32/pytrilogy.egg-info → pytrilogy-0.0.2.34}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34/pytrilogy.egg-info}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pytrilogy.egg-info/SOURCES.txt +1 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_environment.py +16 -0
- pytrilogy-0.0.2.34/tests/test_metadata.py +27 -0
- pytrilogy-0.0.2.34/tests/test_parse_engine.py +30 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_undefined_concept.py +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/constants.py +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/enums.py +1 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/models.py +41 -16
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/utility.py +2 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/common.py +7 -2
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/parse_engine.py +110 -62
- pytrilogy-0.0.2.32/tests/test_metadata.py +0 -11
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/LICENSE.md +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/README.md +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pyproject.toml +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/setup.cfg +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/setup.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_models.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_select.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_show.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/functions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/concept_strategies_v3.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/filter_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/base_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/group_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/core/query_processor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/base.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.34}/trilogy/utility.py +0 -0
tests/test_environment.py CHANGED
@@ -1,6 +1,7 @@
 from trilogy.core.models import Environment
 from pathlib import Path
 from trilogy.core.enums import Modifier
+from trilogy.core.exceptions import UndefinedConceptException


 def test_environment_serialization(test_environment: Environment):
@@ -23,6 +24,21 @@ def test_environment_from_path():
     assert "local.id" in env.concepts


+def test_environment_invalid():
+
+    env = Environment()
+    env.concepts.fail_on_missing = False
+    x = env.concepts["abc"]
+    assert x.name == "abc"
+
+    env.concepts.fail_on_missing = True
+    try:
+        x = env.concepts["abc"]
+        assert 1 == 0
+    except Exception as e:
+        assert isinstance(e, UndefinedConceptException)
+
+
 def test_environment_merge():
     env1: Environment
     env1, _ = Environment().parse(
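Note on the new test_environment_invalid test above: it drives the fail_on_missing toggle through a manual try/except. An equivalent check using pytest.raises (an illustrative sketch for readers, not code from the package) could look like:

import pytest
from trilogy.core.models import Environment
from trilogy.core.exceptions import UndefinedConceptException

def test_environment_invalid_alt():
    env = Environment()
    # with the flag off, an unknown lookup returns a placeholder concept named after the key
    env.concepts.fail_on_missing = False
    assert env.concepts["abc"].name == "abc"
    # with the flag on (the new default, per the models.py change below), the lookup raises
    env.concepts.fail_on_missing = True
    with pytest.raises(UndefinedConceptException):
        env.concepts["abc"]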
pytrilogy-0.0.2.34/tests/test_metadata.py ADDED
@@ -0,0 +1,27 @@
+from trilogy import parse, Environment
+from pathlib import Path
+
+
+def test_metadata():
+    env, _ = parse(
+        """key user_id int metadata(description="the description");
+        property user_id.display_name string metadata(description="The display name");"""
+    )
+
+    assert env.concepts["user_id"].metadata.description == "the description"
+    assert env.concepts["display_name"].metadata.description == "The display name"
+
+
+def test_import_metadata():
+    env = Environment(working_path=Path(__file__).parent)
+    env, _ = parse(
+        """import test_env as env; # Dragon metrics
+import test_env as env2;""",
+        environment=env,
+    )
+
+    assert "Dragon metrics" in env.concepts["env.id"].metadata.description
+
+    env2 = env.concepts["env2.id"]
+    assert env2.namespace == "env2"
+    assert env.concepts["env2.id"].metadata.description is None
pytrilogy-0.0.2.34/tests/test_parse_engine.py ADDED
@@ -0,0 +1,30 @@
+from trilogy.parsing.parse_engine import ParseToObjects, PARSER, unpack_visit_error
+from trilogy import Environment
+from trilogy.core.exceptions import UndefinedConceptException
+from pytest import raises
+
+TEXT = """
+const a <- 1;
+
+select
+    a,
+    b
+;
+"""
+
+
+def test_parser():
+    env = Environment()
+    x = ParseToObjects(environment=env)
+    x.environment.concepts.fail_on_missing = False
+    x.set_text(TEXT)
+    tokens = PARSER.parse(TEXT)
+    x.transform(tokens)
+    failed = False
+    try:
+        x.hydrate_missing()
+    except Exception as e:
+        failed = True
+        with raises(UndefinedConceptException):
+            unpack_visit_error(e)
+    assert failed
tests/test_undefined_concept.py CHANGED
@@ -32,6 +32,6 @@ def test_undefined_concept_dict():
     try:
         env["orid"]
     except UndefinedConceptException as e:
-        assert e.suggestions == ["
+        assert e.suggestions == ["order_id"]
     assert "suggestions" in e.message.lower()
     assert "order_id" in e.message.lower()
trilogy/core/models.py CHANGED
@@ -2077,6 +2077,11 @@ class Datasource(HasUUID, Namespaced, BaseModel):
         self, source: Concept, target: Concept, modifiers: List[Modifier]
     ):
         original = [c for c in self.columns if c.concept.address == source.address]
+        early_exit_check = [
+            c for c in self.columns if c.concept.address == target.address
+        ]
+        if early_exit_check:
+            return None
         if len(original) != 1:
             raise ValueError(
                 f"Expected exactly one column to merge, got {len(original)} for {source.address}, {[x.alias for x in original]}"
@@ -3213,7 +3218,7 @@ class EnvironmentConceptDict(dict):
     def __init__(self, *args, **kwargs) -> None:
         super().__init__(self, *args, **kwargs)
         self.undefined: dict[str, UndefinedConcept] = {}
-        self.fail_on_missing: bool =
+        self.fail_on_missing: bool = True
         self.populate_default_concepts()

     def populate_default_concepts(self):
@@ -3332,7 +3337,6 @@ class Environment(BaseModel):

     materialized_concepts: List[Concept] = Field(default_factory=list)
     alias_origin_lookup: Dict[str, Concept] = Field(default_factory=dict)
-    _parse_count: int = 0

     @classmethod
     def from_file(cls, path: str | Path) -> "Environment":
@@ -3442,19 +3446,22 @@
         exists = False
         existing = self.imports[alias]
         if imp_stm:
-            if any(
+            if any(
+                [x.path == imp_stm.path and x.alias == imp_stm.alias for x in existing]
+            ):
                 exists = True
-
         else:
-            if any(
+            if any(
+                [x.path == source.working_path and x.alias == alias for x in existing]
+            ):
                 exists = True
             imp_stm = ImportStatement(alias=alias, path=Path(source.working_path))
-
         same_namespace = alias == self.namespace

         if not exists:
             self.imports[alias].append(imp_stm)
-
+        # we can't exit early
+        # as there may be new concepts
         for k, concept in source.concepts.items():
             if same_namespace:
                 new = self.add_concept(concept, _ignore_cache=True)
@@ -3485,13 +3492,25 @@
         self.gen_concept_list_caches()
         return self

-    def add_file_import(
-
-
-
-
+    def add_file_import(
+        self, path: str | Path, alias: str, env: Environment | None = None
+    ):
+        from trilogy.parsing.parse_engine import (
+            ParseToObjects,
+            PARSER,
+            gen_cache_lookup,
+        )

-
+        if isinstance(path, str):
+            if path.endswith(".preql"):
+                path = path.rsplit(".", 1)[0]
+            if "." not in path:
+                target = Path(self.working_path, path)
+            else:
+                target = Path(self.working_path, *path.split("."))
+            target = target.with_suffix(".preql")
+        else:
+            target = path
         if alias in self.imports:
             imports = self.imports[alias]
             for x in imports:
@@ -3502,18 +3521,24 @@
                 ImportStatement(alias=alias, path=target, environment=env)
             )
         else:
+            parse_address = gen_cache_lookup(str(target), alias, str(self.working_path))
             try:
                 with open(target, "r", encoding="utf-8") as f:
                     text = f.read()
+                nenv = Environment(
+                    working_path=target.parent,
+                )
+                nenv.concepts.fail_on_missing = False
                 nparser = ParseToObjects(
-                    visit_tokens=True,
-                    text=text,
                     environment=Environment(
                         working_path=target.parent,
                     ),
-                    parse_address=
+                    parse_address=parse_address,
+                    token_address=target,
                 )
+                nparser.set_text(text)
                 nparser.transform(PARSER.parse(text))
+
             except Exception as e:
                 raise ImportError(
                     f"Unable to import file {target.parent}, parsing error: {e}"
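The add_file_import changes above resolve a string import path to a .preql file under the environment's working path. A standalone sketch of that resolution rule (hypothetical helper name, mirroring the added branch):

from pathlib import Path

def resolve_import_target(working_path: str | Path, path: str | Path) -> Path:
    # mirror of the added logic: drop a trailing .preql, then treat dots as folder separators
    if isinstance(path, str):
        if path.endswith(".preql"):
            path = path.rsplit(".", 1)[0]
        if "." not in path:
            target = Path(working_path, path)
        else:
            target = Path(working_path, *path.split("."))
        return target.with_suffix(".preql")
    return Path(path)

print(resolve_import_target("/repo", "models.customer"))  # /repo/models/customer.preql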
trilogy/core/processing/utility.py CHANGED
@@ -425,6 +425,8 @@ def is_scalar_condition(
             return True
         if element.lineage and isinstance(element.lineage, AggregateWrapper):
             return is_scalar_condition(element.lineage, materialized)
+        if element.lineage and isinstance(element.lineage, Function):
+            return is_scalar_condition(element.lineage, materialized)
         return True
     elif isinstance(element, AggregateWrapper):
         return is_scalar_condition(element.function, materialized)
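The utility.py change lets is_scalar_condition follow a concept's Function lineage the same way it already followed AggregateWrapper lineage. A self-contained toy of that recursion pattern (simplified stand-in classes, not the real trilogy models, which also inspect the function's operator):

from dataclasses import dataclass, field

@dataclass
class ToyFunction:              # stand-in for Function
    arguments: list = field(default_factory=list)

@dataclass
class ToyAggregate:             # stand-in for AggregateWrapper
    function: ToyFunction = field(default_factory=ToyFunction)

@dataclass
class ToyConcept:               # stand-in for Concept
    lineage: object = None

def is_scalar(element) -> bool:
    if isinstance(element, ToyConcept):
        if element.lineage is not None and isinstance(element.lineage, ToyAggregate):
            return is_scalar(element.lineage)
        # the new branch: recurse into a Function lineage as well
        if element.lineage is not None and isinstance(element.lineage, ToyFunction):
            return is_scalar(element.lineage)
        return True
    if isinstance(element, ToyAggregate):
        return is_scalar(element.function)
    if isinstance(element, ToyFunction):
        return all(is_scalar(a) for a in element.arguments)
    return True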
trilogy/parsing/common.py CHANGED
@@ -133,7 +133,9 @@ def constant_to_concept(
     )


-def function_to_concept(
+def function_to_concept(
+    parent: Function, name: str, namespace: str, metadata: Metadata | None = None
+) -> Concept:
     pkeys: List[Concept] = []
     for x in parent.arguments:
         pkeys += [
@@ -159,6 +161,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
         purpose = Purpose.CONSTANT
     else:
         purpose = parent.output_purpose
+    fmetadata = metadata or Metadata()
     if grain is not None:
         return Concept(
             name=name,
@@ -169,6 +172,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
             keys=keys,
             modifiers=modifiers,
             grain=grain,
+            metadata=fmetadata,
         )

     return Concept(
@@ -179,6 +183,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
         namespace=namespace,
         keys=keys,
         modifiers=modifiers,
+        metadata=fmetadata,
     )


@@ -305,7 +310,7 @@ def arbitrary_to_concept(
     elif isinstance(parent, Function):
         if not name:
             name = f"{VIRTUAL_CONCEPT_PREFIX}_func_{parent.operator.value}_{string_to_hash(str(parent))}"
-        return function_to_concept(parent, name, namespace)
+        return function_to_concept(parent, name, namespace, metadata=metadata)
     elif isinstance(parent, ListWrapper):
         if not name:
             name = f"{VIRTUAL_CONCEPT_PREFIX}_{string_to_hash(str(parent))}"
trilogy/parsing/parse_engine.py CHANGED
@@ -1,7 +1,7 @@
 from os.path import dirname, join
 from typing import List, Optional, Tuple, Union
 from re import IGNORECASE
-from lark import Lark, Transformer, v_args, Tree
+from lark import Lark, Transformer, v_args, Tree, ParseTree
 from lark.exceptions import (
     UnexpectedCharacters,
     UnexpectedEOF,
@@ -32,6 +32,7 @@ from trilogy.core.enums import (
     ShowCategory,
     FunctionClass,
     IOType,
+    ConceptSource,
 )
 from trilogy.core.exceptions import InvalidSyntaxException, UndefinedConceptException
 from trilogy.core.functions import (
@@ -126,13 +127,16 @@ from trilogy.parsing.common import (
 from dataclasses import dataclass


+CONSTANT_TYPES = (int, float, str, bool, list, ListWrapper, MapWrapper)
+
+SELF_LABEL = "root"
+
+
 @dataclass
 class WholeGrainWrapper:
     where: WhereClause


-CONSTANT_TYPES = (int, float, str, bool, list, ListWrapper, MapWrapper)
-
 with open(join(dirname(__file__), "trilogy.lark"), "r") as f:
     PARSER = Lark(
         f.read(),
@@ -144,6 +148,10 @@ with open(join(dirname(__file__), "trilogy.lark"), "r") as f:
     )


+def gen_cache_lookup(path: str, alias: str, parent: str) -> str:
+    return path + alias + parent
+
+
 def parse_concept_reference(
     name: str, environment: Environment, purpose: Optional[Purpose] = None
 ) -> Tuple[str, str, str, str | None]:
@@ -219,39 +227,47 @@ def unwrap_transformation(
 class ParseToObjects(Transformer):
     def __init__(
         self,
-        visit_tokens,
-        text,
         environment: Environment,
         parse_address: str | None = None,
+        token_address: Path | None = None,
         parsed: dict[str, "ParseToObjects"] | None = None,
+        tokens: dict[Path | str, ParseTree] | None = None,
+        text_lookup: dict[Path | str, str] | None = None,
     ):
-        Transformer.__init__(self,
-        self.text = text
+        Transformer.__init__(self, True)
         self.environment: Environment = environment
-        self.parse_address = parse_address or
+        self.parse_address: str = parse_address or SELF_LABEL
+        self.token_address: Path | str = token_address or SELF_LABEL
         self.parsed: dict[str, ParseToObjects] = parsed if parsed else {}
+        self.tokens: dict[Path | str, ParseTree] = tokens or {}
+        self.text_lookup: dict[Path | str, str] = text_lookup or {}
         # we do a second pass to pick up circular dependencies
         # after initial parsing
         self.pass_count = 1
-        self._results_stash = None

-    def
+    def set_text(self, text: str):
+        self.text_lookup[self.token_address] = text
+
+    def transform(self, tree: Tree):
         results = super().transform(tree)
-        self.
-        self.environment._parse_count += 1
+        self.tokens[self.token_address] = tree
         return results

+    def prepare_parse(self):
+        self.pass_count = 1
+        self.environment.concepts.fail_on_missing = False
+        for _, v in self.parsed.items():
+            v.prepare_parse()
+
     def hydrate_missing(self):
         self.pass_count = 2
         for k, v in self.parsed.items():
-
             if v.pass_count == 2:
                 continue
+            print(f"Hydrating {k}")
             v.hydrate_missing()
         self.environment.concepts.fail_on_missing = True
-
-        # return self._results_stash
-        reparsed = self.transform(PARSER.parse(self.text))
+        reparsed = self.transform(self.tokens[self.token_address])
         self.environment.concepts.undefined = {}
         return reparsed
@@ -266,6 +282,18 @@ class ParseToObjects(Transformer):
                 output.concept.metadata.description
                 or args[1].text.split("#")[1].strip()
             )
+        if isinstance(output, ImportStatement):
+            if len(args) > 1 and isinstance(args[1], Comment):
+                comment = args[1].text.split("#")[1].strip()
+                namespace = output.alias
+                for _, v in self.environment.concepts.items():
+                    if v.namespace == namespace:
+                        if v.metadata.description:
+                            v.metadata.description = (
+                                f"{comment}: {v.metadata.description}"
+                            )
+                        else:
+                            v.metadata.description = comment

         return args[0]
@@ -647,45 +675,41 @@
         return Comment(text=args.value)

     @v_args(meta=True)
-    def select_transform(self, meta, args) -> ConceptTransform:
+    def select_transform(self, meta: Meta, args) -> ConceptTransform:

         output: str = args[1]
-
+        transformation = unwrap_transformation(args[0])
         lookup, namespace, output, parent = parse_concept_reference(
             output, self.environment
         )

-
-
-
-        concept =
-
-            concept = filter_item_to_concept(function, namespace=namespace, name=output)
-        elif isinstance(function, CONSTANT_TYPES):
-            concept = constant_to_concept(function, namespace=namespace, name=output)
-        elif isinstance(function, Function):
-            concept = function_to_concept(function, namespace=namespace, name=output)
-        else:
-            if function.output_purpose == Purpose.PROPERTY:
-                pkeys = [x for x in function.arguments if isinstance(x, Concept)]
-                grain = Grain(components=pkeys)
-                keys = tuple(grain.components_copy)
-            else:
-                grain = None
-                keys = None
-            concept = Concept(
-                name=output,
-                datatype=function.output_datatype,
-                purpose=function.output_purpose,
-                lineage=function,
-                namespace=namespace,
-                grain=Grain(components=[]) if not grain else grain,
-                keys=keys,
+        metadata = Metadata(line_number=meta.line, concept_source=ConceptSource.SELECT)
+
+        if isinstance(transformation, AggregateWrapper):
+            concept = agg_wrapper_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
             )
-
-            concept
+        elif isinstance(transformation, WindowItem):
+            concept = window_item_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, FilterItem):
+            concept = filter_item_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, CONSTANT_TYPES):
+            concept = constant_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, Function):
+            concept = function_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        else:
+            raise SyntaxError("Invalid transformation")
+
         self.environment.add_concept(concept, meta=meta)
-        return ConceptTransform(function=
+        return ConceptTransform(function=transformation, output=concept)

     @v_args(meta=True)
     def concept_nullable_modifier(self, meta: Meta, args) -> Modifier:
@@ -709,7 +733,7 @@ class ParseToObjects(Transformer):
         if len(args) != 1:
             raise ParseError(
                 "Malformed select statement"
-                f" {args} {self.
+                f" {args} {self.text_lookup[self.parse_address][meta.start_pos:meta.end_pos]}"
             )
         content = args[0]
         if isinstance(content, ConceptTransform):
@@ -807,25 +831,46 @@ class ParseToObjects(Transformer):
         path = args[0].split(".")

         target = join(self.environment.working_path, *path) + ".preql"
-
-
+
+        # tokens + text are cached by path
+        token_lookup = Path(target)
+
+        # cache lookups by the target, the alias, and the file we're importing it from
+        cache_lookup = gen_cache_lookup(
+            path=target, alias=alias, parent=str(self.token_address)
+        )
+        if token_lookup in self.tokens:
+            raw_tokens = self.tokens[token_lookup]
+            text = self.text_lookup[token_lookup]
+        else:
+            text = self.resolve_import_address(target)
+            self.text_lookup[token_lookup] = text
+
+            raw_tokens = PARSER.parse(text)
+            self.tokens[token_lookup] = raw_tokens
+
+        if cache_lookup in self.parsed:
+            nparser = self.parsed[cache_lookup]
         else:
             try:
-
+                new_env = Environment(
+                    working_path=dirname(target),
+                )
+                new_env.concepts.fail_on_missing = False
                 nparser = ParseToObjects(
-
-
-
-                    working_path=dirname(target),
-                    # namespace=alias,
-                    ),
-                    parse_address=target,
+                    environment=new_env,
+                    parse_address=cache_lookup,
+                    token_address=token_lookup,
                     parsed={**self.parsed, **{self.parse_address: self}},
+                    tokens={**self.tokens, **{token_lookup: raw_tokens}},
+                    text_lookup={**self.text_lookup, **{token_lookup: text}},
                 )
-                nparser.transform(
-                self.parsed[
+                nparser.transform(raw_tokens)
+                self.parsed[cache_lookup] = nparser
                 # add the parsed objects of the import in
                 self.parsed = {**self.parsed, **nparser.parsed}
+                self.tokens = {**self.tokens, **nparser.tokens}
+                self.text_lookup = {**self.text_lookup, **nparser.text_lookup}
             except Exception as e:
                 raise ImportError(f"Unable to import file {target}, parsing error: {e}")
@@ -1912,11 +1957,14 @@ def parse_text(text: str, environment: Optional[Environment] = None) -> Tuple[
     ],
 ]:
     environment = environment or Environment()
-    parser = ParseToObjects(
+    parser = ParseToObjects(environment=environment)

     try:
+        parser.set_text(text)
+        # disable fail on missing to allow for circular dependencies
+        parser.prepare_parse()
         parser.transform(PARSER.parse(text))
-        #
+        # this will reset fail on missing
         pass_two = parser.hydrate_missing()
         output = [v for v in pass_two if v]
     except VisitError as e:
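Taken together, the parse_engine.py changes above replace re-parsing of source text with caches shared across ParseToObjects instances: lark trees and raw text are keyed by file path, and child parsers are keyed by gen_cache_lookup (path + alias + importing file). A simplified, self-contained sketch of that flow using plain dicts and stand-in values (not the real classes):

from pathlib import Path

def gen_cache_key(path: str, alias: str, parent: str) -> str:
    # mirrors gen_cache_lookup: one child parser per (file, alias, importing file)
    return path + alias + parent

tokens: dict[Path, object] = {}      # parsed trees, keyed by file path
text_lookup: dict[Path, str] = {}    # raw source text, keyed by file path
parsed: dict[str, object] = {}       # child parsers, keyed by cache key

def import_file(target: str, alias: str, importer: str) -> object:
    token_key = Path(target)
    cache_key = gen_cache_key(target, alias, importer)
    if token_key not in tokens:
        # each file is read and parsed at most once per top-level parse
        text_lookup[token_key] = f"<contents of {target}>"  # stand-in for reading the file
        tokens[token_key] = ("tree", target)                 # stand-in for PARSER.parse(text)
    if cache_key not in parsed:
        parsed[cache_key] = ("child-parser", cache_key)      # stand-in for a nested ParseToObjects
    return parsed[cache_key]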
pytrilogy-0.0.2.32/tests/test_metadata.py DELETED
@@ -1,11 +0,0 @@
-from trilogy import parse
-
-
-def test_metadata():
-    env, _ = parse(
-        """key user_id int metadata(description="the description");
-        property user_id.display_name string metadata(description="The display name");"""
-    )
-
-    assert env.concepts["user_id"].metadata.description == "the description"
-    assert env.concepts["display_name"].metadata.description == "The display name"