pytrilogy 0.0.2.32__tar.gz → 0.0.2.33__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of pytrilogy might be problematic.
- {pytrilogy-0.0.2.32/pytrilogy.egg-info → pytrilogy-0.0.2.33}/PKG-INFO +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33/pytrilogy.egg-info}/PKG-INFO +1 -1
- pytrilogy-0.0.2.33/tests/test_metadata.py +27 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/__init__.py +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/constants.py +1 -1
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/enums.py +1 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/models.py +35 -14
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/utility.py +2 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/common.py +7 -2
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/parse_engine.py +95 -52
- pytrilogy-0.0.2.32/tests/test_metadata.py +0 -11
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/LICENSE.md +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/README.md +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pyproject.toml +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pytrilogy.egg-info/SOURCES.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pytrilogy.egg-info/dependency_links.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pytrilogy.egg-info/entry_points.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pytrilogy.egg-info/requires.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/pytrilogy.egg-info/top_level.txt +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/setup.cfg +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/setup.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_datatypes.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_declarations.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_derived_concepts.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_discovery_nodes.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_environment.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_executor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_functions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_imports.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_models.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_multi_join_assignments.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_parsing.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_partial_handling.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_query_processing.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_select.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_show.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_statements.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_undefined_concept.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/tests/test_where_clause.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/compiler.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/constants.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/env_processor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/environment_helpers.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/ergonomics.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/exceptions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/functions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/graph_models.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/internal.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimization.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimizations/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimizations/base_optimization.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimizations/inline_constant.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimizations/inline_datasource.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/concept_strategies_v3.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/graph_utils.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/basic_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/common.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/filter_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/group_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/select_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/node_generators/window_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/base_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/filter_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/group_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/merge_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/unnest_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/nodes/window_node.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/query_processor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/base.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/bigquery.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/common.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/config.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/duckdb.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/enums.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/postgres.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/presto.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/snowflake.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/dialect/sql_server.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/engine.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/executor.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/hooks/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/hooks/base_hook.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/hooks/graph_hook.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/hooks/query_debugger.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/metadata/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parser.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/config.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/exceptions.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/helpers.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/render.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/trilogy.lark +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/py.typed +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/scripts/__init__.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/scripts/trilogy.py +0 -0
- {pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/utility.py +0 -0
pytrilogy-0.0.2.33/tests/test_metadata.py
@@ -0,0 +1,27 @@
+from trilogy import parse, Environment
+from pathlib import Path
+
+
+def test_metadata():
+    env, _ = parse(
+        """key user_id int metadata(description="the description");
+    property user_id.display_name string metadata(description="The display name");"""
+    )
+
+    assert env.concepts["user_id"].metadata.description == "the description"
+    assert env.concepts["display_name"].metadata.description == "The display name"
+
+
+def test_import_metadata():
+    env = Environment(working_path=Path(__file__).parent)
+    env, _ = parse(
+        """import test_env as env; # Dragon metrics
+    import test_env as env2;""",
+        environment=env,
+    )
+
+    assert "Dragon metrics" in env.concepts["env.id"].metadata.description
+
+    env2 = env.concepts["env2.id"]
+    assert env2.namespace == "env2"
+    assert env.concepts["env2.id"].metadata.description is None
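A minimal stand-alone sketch (not part of the package) of what the new test exercises: `metadata(description=...)` on a key or property declaration is surfaced on the parsed concept.

```python
# Sketch only: uses the public parse() API shown in the test above.
from trilogy import parse

env, _ = parse('key user_id int metadata(description="the description");')
print(env.concepts["user_id"].metadata.description)  # -> "the description"
```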
{pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/models.py
@@ -2077,6 +2077,11 @@ class Datasource(HasUUID, Namespaced, BaseModel):
         self, source: Concept, target: Concept, modifiers: List[Modifier]
     ):
         original = [c for c in self.columns if c.concept.address == source.address]
+        early_exit_check = [
+            c for c in self.columns if c.concept.address == target.address
+        ]
+        if early_exit_check:
+            return None
         if len(original) != 1:
             raise ValueError(
                 f"Expected exactly one column to merge, got {len(original)} for {source.address}, {[x.alias for x in original]}"
@@ -3332,7 +3337,6 @@ class Environment(BaseModel):
 
     materialized_concepts: List[Concept] = Field(default_factory=list)
     alias_origin_lookup: Dict[str, Concept] = Field(default_factory=dict)
-    _parse_count: int = 0
 
     @classmethod
     def from_file(cls, path: str | Path) -> "Environment":
@@ -3442,19 +3446,22 @@ class Environment(BaseModel):
                 exists = False
                 existing = self.imports[alias]
                 if imp_stm:
-                    if any(
+                    if any(
+                        [x.path == imp_stm.path and x.alias == imp_stm.alias for x in existing]
+                    ):
                         exists = True
-
                 else:
-                    if any(
+                    if any(
+                        [x.path == source.working_path and x.alias == alias for x in existing]
+                    ):
                         exists = True
                     imp_stm = ImportStatement(alias=alias, path=Path(source.working_path))
-
         same_namespace = alias == self.namespace
 
         if not exists:
             self.imports[alias].append(imp_stm)
-
+        else:
+            return self
         for k, concept in source.concepts.items():
             if same_namespace:
                 new = self.add_concept(concept, _ignore_cache=True)
@@ -3485,13 +3492,25 @@ class Environment(BaseModel):
         self.gen_concept_list_caches()
         return self
 
-    def add_file_import(
-
-
-
-
+    def add_file_import(
+        self, path: str | Path, alias: str, env: Environment | None = None
+    ):
+        from trilogy.parsing.parse_engine import (
+            ParseToObjects,
+            PARSER,
+            gen_cache_lookup,
+        )
 
-
+        if isinstance(path, str):
+            if path.endswith(".preql"):
+                path = path.rsplit(".", 1)[0]
+            if "." not in path:
+                target = Path(self.working_path, path)
+            else:
+                target = Path(self.working_path, *path.split("."))
+            target = target.with_suffix(".preql")
+        else:
+            target = path
         if alias in self.imports:
             imports = self.imports[alias]
             for x in imports:
@@ -3502,17 +3521,19 @@ class Environment(BaseModel):
                     ImportStatement(alias=alias, path=target, environment=env)
                 )
         else:
+            parse_address = gen_cache_lookup(str(target), alias, str(self.working_path))
            try:
                 with open(target, "r", encoding="utf-8") as f:
                     text = f.read()
                 nparser = ParseToObjects(
                     visit_tokens=True,
-                    text=text,
                     environment=Environment(
                         working_path=target.parent,
                     ),
-                    parse_address=
+                    parse_address=parse_address,
+                    token_address=target,
                 )
+                nparser.set_text(text)
                 nparser.transform(PARSER.parse(text))
             except Exception as e:
                 raise ImportError(
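A hedged sketch of the reworked Environment.add_file_import: a bare name such as "orders" is resolved to <working_path>/orders.preql and parsed through a child ParseToObjects. The file name and contents below are hypothetical, and the "alias.name" concept keying is an assumption based on the add_import path above.

```python
# Sketch only: add_file_import resolves "orders" -> <working_path>/orders.preql.
from pathlib import Path
from tempfile import TemporaryDirectory
from trilogy import Environment

with TemporaryDirectory() as tmp:
    Path(tmp, "orders.preql").write_text('key order_id int metadata(description="an order");')
    env = Environment(working_path=Path(tmp))
    env.add_file_import("orders", alias="orders")
    # Assumption: imported concepts are keyed as "<alias>.<name>".
    print([k for k, _ in env.concepts.items() if k.startswith("orders.")])
```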
{pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/core/processing/utility.py
@@ -425,6 +425,8 @@ def is_scalar_condition(
             return True
         if element.lineage and isinstance(element.lineage, AggregateWrapper):
             return is_scalar_condition(element.lineage, materialized)
+        if element.lineage and isinstance(element.lineage, Function):
+            return is_scalar_condition(element.lineage, materialized)
         return True
     elif isinstance(element, AggregateWrapper):
         return is_scalar_condition(element.function, materialized)
{pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/common.py
@@ -133,7 +133,9 @@ def constant_to_concept(
     )
 
 
-def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
+def function_to_concept(
+    parent: Function, name: str, namespace: str, metadata: Metadata | None = None
+) -> Concept:
     pkeys: List[Concept] = []
     for x in parent.arguments:
         pkeys += [
@@ -159,6 +161,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
         purpose = Purpose.CONSTANT
     else:
         purpose = parent.output_purpose
+    fmetadata = metadata or Metadata()
     if grain is not None:
         return Concept(
             name=name,
@@ -169,6 +172,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
             keys=keys,
             modifiers=modifiers,
             grain=grain,
+            metadata=fmetadata,
         )
 
     return Concept(
@@ -179,6 +183,7 @@ def function_to_concept(parent: Function, name: str, namespace: str) -> Concept:
         namespace=namespace,
         keys=keys,
         modifiers=modifiers,
+        metadata=fmetadata,
     )
 
 
@@ -305,7 +310,7 @@ def arbitrary_to_concept(
     elif isinstance(parent, Function):
         if not name:
             name = f"{VIRTUAL_CONCEPT_PREFIX}_func_{parent.operator.value}_{string_to_hash(str(parent))}"
-        return function_to_concept(parent, name, namespace)
+        return function_to_concept(parent, name, namespace, metadata=metadata)
     elif isinstance(parent, ListWrapper):
         if not name:
             name = f"{VIRTUAL_CONCEPT_PREFIX}_{string_to_hash(str(parent))}"
{pytrilogy-0.0.2.32 → pytrilogy-0.0.2.33}/trilogy/parsing/parse_engine.py
@@ -1,7 +1,7 @@
 from os.path import dirname, join
 from typing import List, Optional, Tuple, Union
 from re import IGNORECASE
-from lark import Lark, Transformer, v_args, Tree
+from lark import Lark, Transformer, v_args, Tree, ParseTree
 from lark.exceptions import (
     UnexpectedCharacters,
     UnexpectedEOF,
@@ -32,6 +32,7 @@ from trilogy.core.enums import (
     ShowCategory,
     FunctionClass,
     IOType,
+    ConceptSource,
 )
 from trilogy.core.exceptions import InvalidSyntaxException, UndefinedConceptException
 from trilogy.core.functions import (
@@ -126,13 +127,16 @@ from trilogy.parsing.common import (
 from dataclasses import dataclass
 
 
+CONSTANT_TYPES = (int, float, str, bool, list, ListWrapper, MapWrapper)
+
+SELF_LABEL = "root"
+
+
 @dataclass
 class WholeGrainWrapper:
     where: WhereClause
 
 
-CONSTANT_TYPES = (int, float, str, bool, list, ListWrapper, MapWrapper)
-
 with open(join(dirname(__file__), "trilogy.lark"), "r") as f:
     PARSER = Lark(
         f.read(),
@@ -144,6 +148,10 @@ with open(join(dirname(__file__), "trilogy.lark"), "r") as f:
     )
 
 
+def gen_cache_lookup(path: str, alias: str, parent: str) -> str:
+    return path + alias + parent
+
+
 def parse_concept_reference(
     name: str, environment: Environment, purpose: Optional[Purpose] = None
 ) -> Tuple[str, str, str, str | None]:
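The new cache key is a plain string concatenation; a quick illustration (the values here are hypothetical):

```python
# Mirrors gen_cache_lookup from the hunk above: key = path + alias + parent.
def gen_cache_lookup(path: str, alias: str, parent: str) -> str:
    return path + alias + parent

key = gen_cache_lookup("/repo/models/orders.preql", "orders", "/repo/main.preql")
print(key)  # /repo/models/orders.preqlorders/repo/main.preql
```

The same file imported under two aliases, or from two different parent files, therefore gets distinct entries in the parsed-import cache, while the token and text caches introduced later in this diff key on the path alone.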
@@ -220,25 +228,30 @@ class ParseToObjects(Transformer):
     def __init__(
         self,
         visit_tokens,
-        text,
         environment: Environment,
         parse_address: str | None = None,
+        token_address: Path | None = None,
         parsed: dict[str, "ParseToObjects"] | None = None,
+        tokens: dict[Path | str, ParseTree] | None = None,
+        text_lookup: dict[Path | str, str] | None = None,
     ):
         Transformer.__init__(self, visit_tokens)
-        self.text = text
         self.environment: Environment = environment
-        self.parse_address = parse_address or
+        self.parse_address: str = parse_address or SELF_LABEL
+        self.token_address: Path | str = token_address or SELF_LABEL
         self.parsed: dict[str, ParseToObjects] = parsed if parsed else {}
+        self.tokens: dict[Path | str, ParseTree] = tokens or {}
+        self.text_lookup: dict[Path | str, str] = text_lookup or {}
         # we do a second pass to pick up circular dependencies
         # after initial parsing
         self.pass_count = 1
-        self._results_stash = None
 
-    def
+    def set_text(self, text: str):
+        self.text_lookup[self.token_address] = text
+
+    def transform(self, tree: Tree):
         results = super().transform(tree)
-        self.
-        self.environment._parse_count += 1
+        self.tokens[self.token_address] = tree
         return results
 
     def hydrate_missing(self):
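The parser no longer takes the raw text in its constructor; callers register it with set_text() before calling transform(), matching the updated parse_text further down in this diff. A hedged sketch of the call sequence (ParseToObjects and PARSER are internal APIs and may change):

```python
# Sketch of the new internal call pattern shown in this diff.
from trilogy import Environment
from trilogy.parsing.parse_engine import ParseToObjects, PARSER

text = 'key user_id int metadata(description="the description");'
parser = ParseToObjects(visit_tokens=True, environment=Environment())
parser.set_text(text)                 # stores the source under the parser's token_address
parser.transform(PARSER.parse(text))  # caches the parse tree under the same key
```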
@@ -248,10 +261,10 @@ class ParseToObjects(Transformer):
             if v.pass_count == 2:
                 continue
             v.hydrate_missing()
-        self.environment.concepts.fail_on_missing = True
+        # self.environment.concepts.fail_on_missing = True
         # if not self.environment.concepts.undefined:
         #     return self._results_stash
-        reparsed = self.transform(
+        reparsed = self.transform(self.tokens[self.token_address])
         self.environment.concepts.undefined = {}
         return reparsed
 
@@ -266,6 +279,18 @@ class ParseToObjects(Transformer):
                     output.concept.metadata.description
                     or args[1].text.split("#")[1].strip()
                 )
+        if isinstance(output, ImportStatement):
+            if len(args) > 1 and isinstance(args[1], Comment):
+                comment = args[1].text.split("#")[1].strip()
+                namespace = output.alias
+                for _, v in self.environment.concepts.items():
+                    if v.namespace == namespace:
+                        if v.metadata.description:
+                            v.metadata.description = (
+                                f"{comment}: {v.metadata.description}"
+                            )
+                        else:
+                            v.metadata.description = comment
 
         return args[0]
 
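In effect, a trailing comment on an import statement now becomes (or prefixes) the description of every concept in that import's namespace, which is what test_import_metadata above asserts. A hedged, stand-alone sketch; the file name and its contents are hypothetical:

```python
# Assumes a sibling dragons.preql can be created; uses only APIs shown in this diff.
from pathlib import Path
from tempfile import TemporaryDirectory
from trilogy import parse, Environment

with TemporaryDirectory() as tmp:
    Path(tmp, "dragons.preql").write_text("key id int;")
    env = Environment(working_path=Path(tmp))
    env, _ = parse("import dragons as dragons; # Dragon metrics", environment=env)
    # The comment is applied to concepts in the "dragons" namespace.
    print(env.concepts["dragons.id"].metadata.description)  # contains "Dragon metrics"
```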
@@ -647,45 +672,41 @@ class ParseToObjects(Transformer):
         return Comment(text=args.value)
 
     @v_args(meta=True)
-    def select_transform(self, meta, args) -> ConceptTransform:
+    def select_transform(self, meta: Meta, args) -> ConceptTransform:
 
         output: str = args[1]
-
+        transformation = unwrap_transformation(args[0])
         lookup, namespace, output, parent = parse_concept_reference(
             output, self.environment
         )
 
-
-
-
-        concept =
-
-            concept = filter_item_to_concept(function, namespace=namespace, name=output)
-        elif isinstance(function, CONSTANT_TYPES):
-            concept = constant_to_concept(function, namespace=namespace, name=output)
-        elif isinstance(function, Function):
-            concept = function_to_concept(function, namespace=namespace, name=output)
-        else:
-            if function.output_purpose == Purpose.PROPERTY:
-                pkeys = [x for x in function.arguments if isinstance(x, Concept)]
-                grain = Grain(components=pkeys)
-                keys = tuple(grain.components_copy)
-            else:
-                grain = None
-                keys = None
-            concept = Concept(
-                name=output,
-                datatype=function.output_datatype,
-                purpose=function.output_purpose,
-                lineage=function,
-                namespace=namespace,
-                grain=Grain(components=[]) if not grain else grain,
-                keys=keys,
+        metadata = Metadata(line_number=meta.line, concept_source=ConceptSource.SELECT)
+
+        if isinstance(transformation, AggregateWrapper):
+            concept = agg_wrapper_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
             )
-
-        concept
+        elif isinstance(transformation, WindowItem):
+            concept = window_item_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, FilterItem):
+            concept = filter_item_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, CONSTANT_TYPES):
+            concept = constant_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        elif isinstance(transformation, Function):
+            concept = function_to_concept(
+                transformation, namespace=namespace, name=output, metadata=metadata
+            )
+        else:
+            raise SyntaxError("Invalid transformation")
+
         self.environment.add_concept(concept, meta=meta)
-        return ConceptTransform(function=
+        return ConceptTransform(function=transformation, output=concept)
 
     @v_args(meta=True)
     def concept_nullable_modifier(self, meta: Meta, args) -> Modifier:
@@ -709,7 +730,7 @@ class ParseToObjects(Transformer):
         if len(args) != 1:
             raise ParseError(
                 "Malformed select statement"
-                f" {args} {self.
+                f" {args} {self.text_lookup[self.parse_address][meta.start_pos:meta.end_pos]}"
             )
         content = args[0]
         if isinstance(content, ConceptTransform):
@@ -807,25 +828,46 @@ class ParseToObjects(Transformer):
         path = args[0].split(".")
 
         target = join(self.environment.working_path, *path) + ".preql"
-
-
+
+        # tokens + text are cached by path
+        token_lookup = Path(target)
+
+        # cache lookups by the target, the alias, and the file we're importing it from
+        cache_lookup = gen_cache_lookup(
+            path=target, alias=alias, parent=str(self.token_address)
+        )
+        if token_lookup in self.tokens:
+            raw_tokens = self.tokens[token_lookup]
+            text = self.text_lookup[token_lookup]
+        else:
+            text = self.resolve_import_address(target)
+            self.text_lookup[token_lookup] = text
+
+            raw_tokens = PARSER.parse(text)
+            self.tokens[token_lookup] = raw_tokens
+
+        if cache_lookup in self.parsed:
+            nparser = self.parsed[cache_lookup]
         else:
             try:
-                text = self.resolve_import_address(target)
                 nparser = ParseToObjects(
                     visit_tokens=True,
-                    text=text,
                     environment=Environment(
                         working_path=dirname(target),
                         # namespace=alias,
                     ),
-                    parse_address=
+                    parse_address=cache_lookup,
+                    token_address=token_lookup,
                     parsed={**self.parsed, **{self.parse_address: self}},
+                    tokens={**self.tokens, **{token_lookup: raw_tokens}},
+                    text_lookup={**self.text_lookup, **{token_lookup: text}},
                 )
-                nparser.transform(
-                self.parsed[
+                nparser.transform(raw_tokens)
+                self.parsed[cache_lookup] = nparser
                 # add the parsed objects of the import in
                 self.parsed = {**self.parsed, **nparser.parsed}
+                self.tokens = {**self.tokens, **nparser.tokens}
+                self.text_lookup = {**self.text_lookup, **nparser.text_lookup}
             except Exception as e:
                 raise ImportError(f"Unable to import file {target}, parsing error: {e}")
 
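With this change a file imported several times is parsed only once: parse trees and raw text are cached per resolved path, while fully parsed sub-parsers are cached per (path, alias, importing file). A stand-alone sketch of the per-path caching idea; the names and helper below are illustrative, not the library's API:

```python
# Illustration only: per-path memoization of source text and parse trees,
# mirroring the token_lookup / text_lookup caches added above.
from pathlib import Path
from typing import Any, Callable

_tokens: dict[Path, Any] = {}
_texts: dict[Path, str] = {}

def load_tokens(path: Path, parse: Callable[[str], Any]) -> tuple[Any, str]:
    if path in _tokens:
        return _tokens[path], _texts[path]
    text = path.read_text(encoding="utf-8")
    tree = parse(text)
    _texts[path], _tokens[path] = text, tree
    return tree, text
```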
@@ -1912,9 +1954,10 @@ def parse_text(text: str, environment: Optional[Environment] = None) -> Tuple[
     ],
 ]:
     environment = environment or Environment()
-    parser = ParseToObjects(visit_tokens=True,
+    parser = ParseToObjects(visit_tokens=True, environment=environment)
 
     try:
+        parser.set_text(text)
         parser.transform(PARSER.parse(text))
         # handle circular dependencies
         pass_two = parser.hydrate_missing()
pytrilogy-0.0.2.32/tests/test_metadata.py
@@ -1,11 +0,0 @@
-from trilogy import parse
-
-
-def test_metadata():
-    env, _ = parse(
-        """key user_id int metadata(description="the description");
-    property user_id.display_name string metadata(description="The display name");"""
-    )
-
-    assert env.concepts["user_id"].metadata.description == "the description"
-    assert env.concepts["display_name"].metadata.description == "The display name"