pytrilogy 0.0.3.4__py3-none-any.whl → 0.0.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of pytrilogy has been flagged as a potentially problematic release.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: pytrilogy
- Version: 0.0.3.4
+ Version: 0.0.3.5
  Summary: Declarative, typed query language that compiles to SQL.
  Home-page:
  Author:
@@ -1,8 +1,8 @@
- trilogy/__init__.py,sha256=GsU58IPbyhWVC4PT599Dyal9U1Q1BIKeIhpRY0_115Y,302
+ trilogy/__init__.py,sha256=7rK8M4Aw3BDqS9a5ou3PjyRkVMzEhaVMf6a7V-8ll4E,302
  trilogy/compiler.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  trilogy/constants.py,sha256=qZ1d0hoKPPV2HHCoFwPYTVB7b6bXjpWvXd3lE-zEhy8,1494
  trilogy/engine.py,sha256=yOPnR7XCjWG82Gym_LLZBkYKKJdLCvqdCyt8zguNcnM,1103
- trilogy/executor.py,sha256=nvi8F8ls7stAXvYUIRs6zh8X4q6O_plZcfazPnL-hKw,16745
+ trilogy/executor.py,sha256=sssEPDnIDPiQtMSrt5pFiJXUfcDc6gSi4m2Eliod_BM,16844
  trilogy/parser.py,sha256=o4cfk3j3yhUFoiDKq9ZX_GjBF3dKhDjXEwb63rcBkBM,293
  trilogy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  trilogy/utility.py,sha256=euQccZLKoYBz0LNg5tzLlvv2YHvXh9HArnYp1V3uXsM,763
@@ -18,14 +18,14 @@ trilogy/core/functions.py,sha256=7Pq9jYSJd45L2pxT7AI-_rXVZmeLnmTPp8d1lA4z4Vk,244
  trilogy/core/graph_models.py,sha256=z17EoO8oky2QOuO6E2aMWoVNKEVJFhLdsQZOhC4fNLU,2079
  trilogy/core/internal.py,sha256=iicDBlC6nM8d7e7jqzf_ZOmpUsW8yrr2AA8AqEiLx-s,1577
  trilogy/core/optimization.py,sha256=xGO8piVsLrpqrx-Aid_Y56_5slSv4eZmlP64hCHRiEc,7957
- trilogy/core/query_processor.py,sha256=HyDxBhQsD9KX-Y7pYznlpCAW6AvI76RqPTQNa1mreoE,19450
+ trilogy/core/query_processor.py,sha256=Do8YpdPBdsbKtl9n37hobzk8SORMGqH-e_zNNxd-BE4,19456
  trilogy/core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- trilogy/core/models/author.py,sha256=ydg1FwMjL58AWsM5cJTB0B8AR8jFaxAZdXfQroiTC0M,67264
+ trilogy/core/models/author.py,sha256=oRCKWhz-i1fO1LlHWiHE3l1awCHdQ3yx6FKH9n9RxRU,67188
  trilogy/core/models/build.py,sha256=kiq31T8LtUtgmT37m617Q2MlMvQTuAxJzwb6947EiWU,56127
  trilogy/core/models/build_environment.py,sha256=8UggvlPU708GZWYPJMc_ou2r7M3TY2g69eqGvz03YX0,5528
  trilogy/core/models/core.py,sha256=yie1uuq62uOQ5fjob9NMJbdvQPrCErXUT7JTCuYRyjI,9697
  trilogy/core/models/datasource.py,sha256=c0tGxyH2WwTmAD047tr69U0a6GNVf-ug26H68yii7DA,9257
- trilogy/core/models/environment.py,sha256=QSl-H6nwarzKbQgNRjtwDKMJtA4F_GVQpRs-NMNt-6Q,24983
+ trilogy/core/models/environment.py,sha256=GU8D3cn6lCFAseVPYfW_a-cnBbD1sYEeDVOkbZSWCxk,25943
  trilogy/core/models/execute.py,sha256=ABylFQgtavjjCfFkEsFdUwfMB4UBQLHjdzQ9E67QlAE,33521
  trilogy/core/optimizations/__init__.py,sha256=EBanqTXEzf1ZEYjAneIWoIcxtMDite5-n2dQ5xcfUtg,356
  trilogy/core/optimizations/base_optimization.py,sha256=gzDOKImoFn36k7XBD3ysEYDnbnb6vdVIztUfFQZsGnM,513
@@ -33,7 +33,7 @@ trilogy/core/optimizations/inline_constant.py,sha256=lvNTIXaLNkw3HseJyXyDNk5R52d
  trilogy/core/optimizations/inline_datasource.py,sha256=AHuTGh2x0GQ8usOe0NiFncfTFQ_KogdgDl4uucmhIbI,4241
  trilogy/core/optimizations/predicate_pushdown.py,sha256=g4AYE8Aw_iMlAh68TjNXGP754NTurrDduFECkUjoBnc,9399
  trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- trilogy/core/processing/concept_strategies_v3.py,sha256=0rFnasSQlkXTRIfAFMHyHVux1pTQ3ryKeQds-SFSot0,40290
+ trilogy/core/processing/concept_strategies_v3.py,sha256=wPlpg4L7uw-f0DgJBkI8VRdcisjDT1X6iApjEE6CmfA,40291
  trilogy/core/processing/graph_utils.py,sha256=8QUVrkE9j-9C1AyrCb1nQEh8daCe0u1HuXl-Te85lag,1205
  trilogy/core/processing/utility.py,sha256=Oc5tLGeDDpzhbfo2ZcF8ex1kez-NcJDMcG2Lm5BjS4c,20548
  trilogy/core/processing/node_generators/__init__.py,sha256=o8rOFHPSo-s_59hREwXMW6gjUJCsiXumdbJNozHUf-Y,800
@@ -88,14 +88,14 @@ trilogy/parsing/common.py,sha256=yAE3x4SyO4PfAb7HhZ_l9sNPYaf_pcM1K8ioEy76SCU,203
  trilogy/parsing/config.py,sha256=Z-DaefdKhPDmSXLgg5V4pebhSB0h590vI0_VtHnlukI,111
  trilogy/parsing/exceptions.py,sha256=92E5i2frv5hj9wxObJZsZqj5T6bglvPzvdvco_vW1Zk,38
  trilogy/parsing/helpers.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- trilogy/parsing/parse_engine.py,sha256=5SZ572CMcmSFv-LIIUmvABZFljoOS75948S6hjlU0xU,54064
+ trilogy/parsing/parse_engine.py,sha256=uazcnUhY3pERY8Xa116IHl1JhCPSlvQG6i4vP7I4Qpk,54638
  trilogy/parsing/render.py,sha256=o_XuQWhcwx1lD9eGVqkqZEwkmQK0HdmWWokGBtdeH4I,17837
  trilogy/parsing/trilogy.lark,sha256=EazfEvYPuvkPkNjUnVzFi0uD9baavugbSI8CyfawShk,12573
  trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  trilogy/scripts/trilogy.py,sha256=1L0XrH4mVHRt1C9T1HnaDv2_kYEfbWTb5_-cBBke79w,3774
- pytrilogy-0.0.3.4.dist-info/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
- pytrilogy-0.0.3.4.dist-info/METADATA,sha256=qxTMW9Dh0nhD4Ea42IlNy2tRWVPp54EDGBcjDmvlaZM,8983
- pytrilogy-0.0.3.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- pytrilogy-0.0.3.4.dist-info/entry_points.txt,sha256=0petKryjvvtEfTlbZC1AuMFumH_WQ9v8A19LvoS6G6c,54
- pytrilogy-0.0.3.4.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
- pytrilogy-0.0.3.4.dist-info/RECORD,,
+ pytrilogy-0.0.3.5.dist-info/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
+ pytrilogy-0.0.3.5.dist-info/METADATA,sha256=W8SsLe2qtgpl4SDr_xlL3lOwReWbKYBClHXkjLQSIqk,8983
+ pytrilogy-0.0.3.5.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ pytrilogy-0.0.3.5.dist-info/entry_points.txt,sha256=0petKryjvvtEfTlbZC1AuMFumH_WQ9v8A19LvoS6G6c,54
+ pytrilogy-0.0.3.5.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
+ pytrilogy-0.0.3.5.dist-info/RECORD,,
trilogy/__init__.py CHANGED
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
  from trilogy.executor import Executor
  from trilogy.parser import parse

- __version__ = "0.0.3.4"
+ __version__ = "0.0.3.5"

  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/core/models/author.py CHANGED
@@ -166,10 +166,8 @@ class UndefinedConcept(ConceptRef):


  def address_with_namespace(address: str, namespace: str) -> str:
- ns = address.split(".", 1)[0]
- if ns == namespace:
- return address
- if ns == DEFAULT_NAMESPACE:
+ existing_ns = address.split(".", 1)[0]
+ if existing_ns == DEFAULT_NAMESPACE:
  return f"{namespace}.{address.split('.',1)[1]}"
  return f"{namespace}.{address}"

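
This hunk drops the early return for addresses that already carry the target namespace: only default-namespace addresses get their prefix swapped, and everything else is prefixed unconditionally. A minimal sketch of the new behaviour; the value of DEFAULT_NAMESPACE ("local" below) is an assumption for illustration, not taken from this diff.

DEFAULT_NAMESPACE = "local"  # assumed value, used only for this example

def address_with_namespace(address: str, namespace: str) -> str:
    existing_ns = address.split(".", 1)[0]
    if existing_ns == DEFAULT_NAMESPACE:
        # swap the default namespace for the import alias
        return f"{namespace}.{address.split('.',1)[1]}"
    # otherwise prepend the alias, even if the address already starts with it
    return f"{namespace}.{address}"

assert address_with_namespace("local.orders", "sales") == "sales.orders"
assert address_with_namespace("dates.year", "sales") == "sales.dates.year"
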
@@ -203,7 +201,7 @@ class Parenthetical(
  def __repr__(self):
  return f"({str(self.content)})"

- def with_namespace(self, namespace: str):
+ def with_namespace(self, namespace: str) -> Parenthetical:
  return Parenthetical.model_construct(
  content=(
  self.content.with_namespace(namespace)
@@ -917,8 +915,6 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
  return self.name.replace(".", "_")

  def with_namespace(self, namespace: str) -> Self:
- if namespace == self.namespace:
- return self
  return self.__class__.model_construct(
  name=self.name,
  datatype=self.datatype,
trilogy/core/models/environment.py CHANGED
@@ -8,6 +8,7 @@ from pathlib import Path
  from typing import (
  TYPE_CHECKING,
  Annotated,
+ Any,
  Dict,
  ItemsView,
  List,
@@ -383,14 +384,13 @@ class Environment(BaseModel):
  ):
  exists = True
  imp_stm = Import(alias=alias, path=Path(source.working_path))
- same_namespace = alias == self.namespace
+ same_namespace = alias == DEFAULT_NAMESPACE

  if not exists:
  self.imports[alias].append(imp_stm)
  # we can't exit early
  # as there may be new concepts
  for k, concept in source.concepts.items():
-
  # skip internal namespace
  if INTERNAL_NAMESPACE in concept.address:
  continue
@@ -416,7 +416,6 @@
  self.alias_origin_lookup[address_with_namespace(key, alias)] = (
  val.with_namespace(alias)
  )
-
  return self

  def add_file_import(
@@ -457,8 +456,10 @@
  token_address=target,
  )
  nparser.set_text(text)
+ nparser.environment.concepts.fail_on_missing = False
  nparser.transform(PARSER.parse(text))
  nparser.hydrate_missing()
+ nparser.environment.concepts.fail_on_missing = True

  except Exception as e:
  raise ImportError(
@@ -664,37 +665,59 @@ class LazyEnvironment(Environment):
  until relevant attributes accessed."""

  load_path: Path
+ setup_queries: list[Any] = Field(default_factory=list)
  loaded: bool = False

+ @property
+ def setup_path(self) -> Path:
+ return self.load_path.parent / "setup.preql"
+
  def __init__(self, **data):
+ if not data.get("working_path"):
+ data["working_path"] = data["load_path"].parent
  super().__init__(**data)
+ assert self.working_path == self.load_path.parent

  def _add_path_concepts(self):
  pass

+ def _load(self):
+ if self.loaded:
+ return
+ from trilogy import parse
+
+ env = Environment(working_path=self.load_path.parent)
+ assert env.working_path == self.load_path.parent
+ with open(self.load_path, "r") as f:
+ env, _ = parse(f.read(), env)
+ if self.setup_path.exists():
+ with open(self.setup_path, "r") as f2:
+ env, q = parse(f2.read(), env)
+ for q in q:
+ self.setup_queries.append(q)
+ self.loaded = True
+ self.datasources = env.datasources
+ self.concepts = env.concepts
+ self.imports = env.imports
+ self.alias_origin_lookup = env.alias_origin_lookup
+ self.materialized_concepts = env.materialized_concepts
+ self.functions = env.functions
+ self.data_types = env.data_types
+ self.cte_name_map = env.cte_name_map
+
  def __getattribute__(self, name):
- if name in (
- "load_path",
- "loaded",
- "working_path",
- "model_config",
- "model_fields",
- "model_post_init",
+ if name not in (
+ "datasources",
+ "concepts",
+ "imports",
+ "materialized_concepts",
+ "functions",
+ "datatypes",
+ "cte_name_map",
  ) or name.startswith("_"):
  return super().__getattribute__(name)
  if not self.loaded:
- logger.info(
- f"lazily evaluating load path {self.load_path} to access {name}"
- )
- from trilogy import parse
-
- env = Environment(working_path=str(self.working_path))
- with open(self.load_path, "r") as f:
- parse(f.read(), env)
- self.loaded = True
- self.datasources = env.datasources
- self.concepts = env.concepts
- self.imports = env.imports
+ self._load()
  return super().__getattribute__(name)


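
Taken together, the LazyEnvironment changes move parsing into a dedicated _load() that also picks up an optional sibling setup.preql, and working_path now defaults to the directory containing load_path. A hedged usage sketch; the file layout and the import path of LazyEnvironment are assumptions, not confirmed by this diff.

from pathlib import Path

from trilogy.core.models.environment import LazyEnvironment  # import path assumed

env = LazyEnvironment(load_path=Path("models/orders.preql"))  # hypothetical file
# __init__ now defaults working_path to load_path.parent ("models" here).
print(env.working_path)

# Accessing a modelled attribute such as concepts or datasources triggers
# _load(): the main file is parsed, and if models/setup.preql exists its
# parsed statements are collected into env.setup_queries.
concepts = env.concepts
setup_queries = env.setup_queries
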
trilogy/core/processing/concept_strategies_v3.py CHANGED
@@ -1046,7 +1046,7 @@ def source_query_concepts(
  f"{c.address}<{c.purpose}>{c.derivation}>" for c in output_concepts
  ]
  raise ValueError(
- f"Could not resolve conections between {error_strings} from environment graph."
+ f"Could not resolve connections between {error_strings} from environment graph."
  )
  final = [x for x in root.output_concepts if x.address not in root.hidden_concepts]
  logger.info(
trilogy/core/query_processor.py CHANGED
@@ -385,7 +385,7 @@ def get_query_node(
  )
  graph = generate_graph(build_environment)
  logger.info(
- f"{LOGGER_PREFIX} getting source datasource for outputs {statement.output_components} grain {build_statement.grain}"
+ f"{LOGGER_PREFIX} getting source datasource for outputs {build_statement.output_components} grain {build_statement.grain}"
  )

  search_concepts: list[BuildConcept] = build_statement.output_components
trilogy/executor.py CHANGED
@@ -346,10 +346,10 @@ class Executor(object):
  file = Path(file)
  with open(file, "r") as f:
  command = f.read()
- return self.parse_text_generator(command, persist=persist)
+ return self.parse_text_generator(command, persist=persist, root=file)

  def parse_text(
- self, command: str, persist: bool = False
+ self, command: str, persist: bool = False, root: Path | None = None
  ) -> List[
  ProcessedQuery
  | ProcessedQueryPersist
@@ -357,9 +357,11 @@
  | ProcessedRawSQLStatement
  | ProcessedCopyStatement
  ]:
- return list(self.parse_text_generator(command, persist=persist))
+ return list(self.parse_text_generator(command, persist=persist, root=root))

- def parse_text_generator(self, command: str, persist: bool = False) -> Generator[
+ def parse_text_generator(
+ self, command: str, persist: bool = False, root: Path | None = None
+ ) -> Generator[
  ProcessedQuery
  | ProcessedQueryPersist
  | ProcessedShowStatement
@@ -369,7 +371,7 @@
  None,
  ]:
  """Process a preql text command"""
- _, parsed = parse_text(command, self.environment)
+ _, parsed = parse_text(command, self.environment, root=root)
  generatable = [
  x
  for x in parsed
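
The executor changes thread a new optional root argument from file parsing down to the parser, so the source file's location travels with its text. A hedged usage sketch; the script path is illustrative, and executor stands for an already-configured Executor instance.

from pathlib import Path

script = Path("reports/daily.preql")  # hypothetical script location
statements = executor.parse_text(script.read_text(), root=script)
# parse_text forwards root to parse_text_generator, which hands it to the
# module-level parse_text call shown in the parse_engine hunks below.
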
trilogy/parsing/parse_engine.py CHANGED
@@ -147,6 +147,9 @@ with open(join(dirname(__file__), "trilogy.lark"), "r") as f:


  def gen_cache_lookup(path: str, alias: str, parent: str) -> str:
+ # path is the path of the file
+ # alias is what it's being imported under
+ # parent is the...direct parnet?
  return path + alias + parent


@@ -252,10 +255,12 @@ class ParseToObjects(Transformer):
  for _, v in self.parsed.items():
  v.prepare_parse()

- def hydrate_missing(self):
+ def hydrate_missing(self, force: bool = False):
+ if self.token_address not in self.tokens:
+ return []
  self.parse_pass = ParsePass.VALIDATION
- for k, v in self.parsed.items():
- if v.parse_pass == ParsePass.VALIDATION:
+ for _, v in list(self.parsed.items()):
+ if v.parse_pass == ParsePass.VALIDATION and not force:
  continue
  v.hydrate_missing()
  reparsed = self.transform(self.tokens[self.token_address])
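
hydrate_missing now bails out when its token address was never parsed, walks a snapshot of the parsed sub-parsers, and accepts a force flag so already-validated parsers can be re-hydrated. A sketch of the two-pass flow these changes support, assembled from the parse_text hunk near the end of this diff (error handling elided):

parser = ParseToObjects(environment=environment)
parser.prepare_parse()
parser.transform(PARSER.parse(text))                # first pass over the raw tokens
pass_two = parser.hydrate_missing(force=True)       # second pass: forced re-walk of cached tokens
output = [v for v in pass_two if v]
environment.concepts.fail_on_missing = True         # strict lookups restored afterwards
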
@@ -390,7 +395,6 @@

  @v_args(meta=True)
  def column_assignment(self, meta: Meta, args):
- # TODO -> deal with conceptual modifiers
  modifiers = []
  alias = args[0]
  concept_list = args[1]
@@ -398,6 +402,7 @@
  if len(concept_list) > 1:
  modifiers += concept_list[:-1]
  concept = concept_list[-1]
+ assert not self.environment.concepts.fail_on_missing
  resolved = self.environment.concepts.__getitem__( # type: ignore
  key=concept, line_no=meta.line, file=self.token_address
  )
@@ -879,10 +884,14 @@
  if cache_lookup in self.parsed:
  nparser = self.parsed[cache_lookup]
  new_env = nparser.environment
+ if nparser.parse_pass != ParsePass.VALIDATION:
+ # nparser.transform(raw_tokens)
+ nparser.hydrate_missing()
  else:
  try:
  new_env = Environment(
  working_path=dirname(target),
+ env_file_path=token_lookup,
  )
  new_env.concepts.fail_on_missing = False
  self.parsed[self.parse_address] = self
@@ -900,10 +909,11 @@
  raise ImportError(
  f"Unable to import file {target}, parsing error: {e}"
  ) from e
+ parsed_path = Path(args[0])
+ imps = ImportStatement(alias=alias, path=parsed_path)

- imps = ImportStatement(alias=alias, path=Path(args[0]))
  self.environment.add_import(
- alias, new_env, Import(alias=alias, path=Path(args[0]))
+ alias, new_env, Import(alias=alias, path=parsed_path)
  )
  return imps

@@ -1594,7 +1604,9 @@ def parse_text_raw(text: str, environment: Optional[Environment] = None):
  PARSER.parse(text)


- def parse_text(text: str, environment: Optional[Environment] = None) -> Tuple[
+ def parse_text(
+ text: str, environment: Optional[Environment] = None, root: Path | None = None
+ ) -> Tuple[
  Environment,
  List[
  Datasource
@@ -1606,7 +1618,9 @@ def parse_text(text: str, environment: Optional[Environment] = None) -> Tuple[
  | None
  ],
  ]:
- environment = environment or Environment()
+ environment = environment or (
+ Environment(working_path=root) if root else Environment()
+ )
  parser = ParseToObjects(environment=environment)

  try:
@@ -1615,7 +1629,7 @@ def parse_text(text: str, environment: Optional[Environment] = None) -> Tuple[
  parser.prepare_parse()
  parser.transform(PARSER.parse(text))
  # this will reset fail on missing
- pass_two = parser.hydrate_missing()
+ pass_two = parser.hydrate_missing(force=True)
  output = [v for v in pass_two if v]
  environment.concepts.fail_on_missing = True
  except VisitError as e:
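
Finally, the module-level parse_text gains the same root parameter: when no Environment is supplied, the new default builds one whose working_path comes from root, and with neither argument the previous behaviour is unchanged. A minimal sketch, assuming parse_text is importable from trilogy.parsing.parse_engine (the module these hunks appear to modify):

from pathlib import Path

from trilogy.parsing.parse_engine import parse_text  # import path assumed

script = Path("models/demo.preql")  # hypothetical file
env, statements = parse_text(script.read_text(), root=script)
# env.working_path now reflects the supplied root rather than the process CWD.
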