pytrilogy 0.0.2.18__tar.gz → 0.0.2.20__tar.gz

This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of pytrilogy might be problematic.

Files changed (106)
  1. {pytrilogy-0.0.2.18/pytrilogy.egg-info → pytrilogy-0.0.2.20}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/__init__.py +1 -1
  4. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/models.py +17 -25
  5. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/group_node.py +13 -3
  6. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/node_merge_node.py +0 -1
  7. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/select_merge_node.py +36 -21
  8. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/utility.py +0 -1
  9. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/parse_engine.py +7 -2
  10. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/LICENSE.md +0 -0
  11. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/README.md +0 -0
  12. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pyproject.toml +0 -0
  13. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pytrilogy.egg-info/SOURCES.txt +0 -0
  14. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pytrilogy.egg-info/dependency_links.txt +0 -0
  15. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pytrilogy.egg-info/entry_points.txt +0 -0
  16. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pytrilogy.egg-info/requires.txt +0 -0
  17. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/pytrilogy.egg-info/top_level.txt +0 -0
  18. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/setup.cfg +0 -0
  19. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/setup.py +0 -0
  20. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_datatypes.py +0 -0
  21. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_declarations.py +0 -0
  22. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_derived_concepts.py +0 -0
  23. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_discovery_nodes.py +0 -0
  24. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_environment.py +0 -0
  25. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_functions.py +0 -0
  26. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_imports.py +0 -0
  27. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_metadata.py +0 -0
  28. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_models.py +0 -0
  29. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_multi_join_assignments.py +0 -0
  30. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_parsing.py +0 -0
  31. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_partial_handling.py +0 -0
  32. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_query_processing.py +0 -0
  33. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_select.py +0 -0
  34. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_statements.py +0 -0
  35. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_undefined_concept.py +0 -0
  36. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/tests/test_where_clause.py +0 -0
  37. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/compiler.py +0 -0
  38. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/constants.py +0 -0
  39. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/__init__.py +0 -0
  40. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/constants.py +0 -0
  41. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/enums.py +0 -0
  42. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/env_processor.py +0 -0
  43. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/environment_helpers.py +0 -0
  44. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/ergonomics.py +0 -0
  45. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/exceptions.py +0 -0
  46. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/functions.py +0 -0
  47. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/graph_models.py +0 -0
  48. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/internal.py +0 -0
  49. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimization.py +0 -0
  50. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimizations/__init__.py +0 -0
  51. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimizations/base_optimization.py +0 -0
  52. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimizations/inline_constant.py +0 -0
  53. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimizations/inline_datasource.py +0 -0
  54. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  55. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/__init__.py +0 -0
  56. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/concept_strategies_v3.py +0 -0
  57. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/graph_utils.py +0 -0
  58. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/__init__.py +0 -0
  59. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  60. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/common.py +0 -0
  61. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  62. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  63. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  64. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  65. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/select_node.py +0 -0
  66. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  67. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/node_generators/window_node.py +0 -0
  68. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/__init__.py +0 -0
  69. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/base_node.py +0 -0
  70. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/filter_node.py +0 -0
  71. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/group_node.py +0 -0
  72. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/merge_node.py +0 -0
  73. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  74. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  75. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/processing/nodes/window_node.py +0 -0
  76. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/core/query_processor.py +0 -0
  77. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/__init__.py +0 -0
  78. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/base.py +0 -0
  79. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/bigquery.py +0 -0
  80. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/common.py +0 -0
  81. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/config.py +0 -0
  82. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/duckdb.py +0 -0
  83. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/enums.py +0 -0
  84. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/postgres.py +0 -0
  85. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/presto.py +0 -0
  86. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/snowflake.py +0 -0
  87. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/dialect/sql_server.py +0 -0
  88. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/engine.py +0 -0
  89. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/executor.py +0 -0
  90. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/hooks/__init__.py +0 -0
  91. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/hooks/base_hook.py +0 -0
  92. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/hooks/graph_hook.py +0 -0
  93. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/hooks/query_debugger.py +0 -0
  94. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/metadata/__init__.py +0 -0
  95. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parser.py +0 -0
  96. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/__init__.py +0 -0
  97. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/common.py +0 -0
  98. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/config.py +0 -0
  99. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/exceptions.py +0 -0
  100. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/helpers.py +0 -0
  101. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/render.py +0 -0
  102. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/parsing/trilogy.lark +0 -0
  103. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/py.typed +0 -0
  104. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/scripts/__init__.py +0 -0
  105. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/scripts/trilogy.py +0 -0
  106. {pytrilogy-0.0.2.18 → pytrilogy-0.0.2.20}/trilogy/utility.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pytrilogy
-Version: 0.0.2.18
+Version: 0.0.2.20
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:

pytrilogy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pytrilogy
-Version: 0.0.2.18
+Version: 0.0.2.20
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:

trilogy/__init__.py
@@ -4,6 +4,6 @@ from trilogy.executor import Executor
 from trilogy.parser import parse
 from trilogy.constants import CONFIG
 
-__version__ = "0.0.2.18"
+__version__ = "0.0.2.20"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]

trilogy/core/models.py
@@ -559,19 +559,16 @@ class Concept(Mergeable, Namespaced, SelectContext, BaseModel):
         grain = ",".join([str(c.address) for c in self.grain.components])
         return f"{self.namespace}.{self.name}<{grain}>"
 
-    @property
+    @cached_property
     def address(self) -> str:
-        if not self._address_cache:
-            self._address_cache = f"{self.namespace}.{self.name}"
-        return self._address_cache
-
-    @address.setter
-    def address(self, address: str) -> None:
-        self._address_cache = address
+        return f"{self.namespace}.{self.name}"
 
     def set_name(self, name: str):
         self.name = name
-        self.address = f"{self.namespace}.{self.name}"
+        try:
+            del self.address
+        except AttributeError:
+            pass
 
     @property
     def output(self) -> "Concept":
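
Note: the change above replaces the hand-rolled _address_cache property and its setter with functools.cached_property, and set_name now invalidates the cache by deleting the cached attribute. A minimal sketch of that pattern, using a stand-in class rather than the actual Concept model:

from functools import cached_property


class Node:
    """Stand-in for a concept-like object with a cached address."""

    def __init__(self, namespace: str, name: str):
        self.namespace = namespace
        self.name = name

    @cached_property
    def address(self) -> str:
        # computed on first access, then stored in the instance __dict__
        return f"{self.namespace}.{self.name}"

    def set_name(self, name: str) -> None:
        self.name = name
        try:
            # drop the cached value so the next access recomputes it
            del self.address
        except AttributeError:
            # nothing cached yet
            pass


n = Node("local", "order_id")
assert n.address == "local.order_id"
n.set_name("customer_id")
assert n.address == "local.customer_id"
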
@@ -3260,23 +3257,13 @@ class Environment(BaseModel):
         for datasource in self.datasources.values():
             for concept in datasource.output_concepts:
                 concrete_addresses.add(concept.address)
-        current_mat = [x.address for x in self.materialized_concepts]
         self.materialized_concepts = [
             c for c in self.concepts.values() if c.address in concrete_addresses
-        ]
-        # include aliased concepts
-        self.materialized_concepts += [
+        ] + [
            c
            for c in self.alias_origin_lookup.values()
            if c.address in concrete_addresses
         ]
-        new = [
-            x.address
-            for x in self.materialized_concepts
-            if x.address not in current_mat
-        ]
-        if new:
-            logger.debug(f"Environment added new materialized concepts {new}")
 
     def validate_concept(self, lookup: str, meta: Meta | None = None):
         existing: Concept = self.concepts.get(lookup) # type: ignore
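
Note: the rewritten cache build above collects the addresses concretely available from datasources, then assembles materialized_concepts in a single concatenation (matching concepts plus matching alias origins), dropping the old debug logging of newly materialized concepts. A rough sketch of the same single-pass pattern with placeholder data, not the real Concept objects:

# Placeholder data; the real code works with Concept objects keyed by address.
concrete_addresses = {"local.order_id", "local.customer_id"}
concepts = {"local.order_id": ..., "local.revenue": ...}
alias_origin_lookup = {"sales.order_id": "local.order_id"}

materialized = [
    addr for addr in concepts if addr in concrete_addresses
] + [
    alias
    for alias, origin in alias_origin_lookup.items()
    if origin in concrete_addresses
]
print(materialized)  # ['local.order_id', 'sales.order_id']
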
@@ -3400,6 +3387,7 @@
         meta: Meta | None = None,
         force: bool = False,
         add_derived: bool = True,
+        _ignore_cache: bool = False,
     ):
         if not force:
             self.validate_concept(concept.address, meta=meta)
@@ -3410,13 +3398,15 @@
         from trilogy.core.environment_helpers import generate_related_concepts
 
         generate_related_concepts(concept, self, meta=meta, add_derived=add_derived)
-        self.gen_concept_list_caches()
+        if not _ignore_cache:
+            self.gen_concept_list_caches()
         return concept
 
     def add_datasource(
         self,
         datasource: Datasource,
         meta: Meta | None = None,
+        _ignore_cache: bool = False,
     ):
 
         self.datasources[datasource.env_label] = datasource
@@ -3428,11 +3418,13 @@
                 new_concept.set_name("_pre_persist_" + current_concept.name)
                 # remove the associated lineage
                 current_concept.lineage = None
-                self.add_concept(new_concept, meta=meta, force=True)
-                self.add_concept(current_concept, meta=meta, force=True)
+                self.add_concept(new_concept, meta=meta, force=True, _ignore_cache=True)
+                self.add_concept(
+                    current_concept, meta=meta, force=True, _ignore_cache=True
+                )
                 self.merge_concept(new_concept, current_concept, [])
-
-        self.gen_concept_list_caches()
+        if not _ignore_cache:
+            self.gen_concept_list_caches()
         return datasource
 
     def delete_datasource(
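
Note: both add_concept and add_datasource gain a private _ignore_cache flag so bulk callers can defer the per-item gen_concept_list_caches rebuild and run it once at the end; the import path in parse_engine.py (last hunk below) uses exactly this. A hedged sketch of that batching pattern with an illustrative class, not the real Environment API:

class Registry:
    """Illustrative registry with an expensive derived cache."""

    def __init__(self) -> None:
        self.items: dict[str, str] = {}
        self.cache: list[str] = []
        self.rebuilds = 0

    def _rebuild_cache(self) -> None:
        # stands in for gen_concept_list_caches(): recompute derived state
        self.rebuilds += 1
        self.cache = sorted(self.items)

    def add(self, key: str, value: str, _ignore_cache: bool = False) -> None:
        self.items[key] = value
        if not _ignore_cache:
            self._rebuild_cache()


r = Registry()
for i in range(100):
    r.add(f"concept_{i}", "definition", _ignore_cache=True)  # defer the rebuild
r._rebuild_cache()  # one rebuild for the whole batch
assert r.rebuilds == 1
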

trilogy/core/processing/node_generators/group_node.py
@@ -5,6 +5,7 @@ from trilogy.core.models import (
     WhereClause,
     Function,
     AggregateWrapper,
+    Grain,
 )
 from trilogy.utility import unique
 from trilogy.core.processing.nodes import GroupNode, StrategyNode, History
@@ -50,9 +51,9 @@ def gen_group_node(
         parent_concepts += grain_components
         output_concepts += grain_components
     for possible_agg in local_optional:
+        if not isinstance(possible_agg.lineage, (AggregateWrapper, Function)):
+            continue
         if possible_agg.grain and possible_agg.grain == concept.grain:
-            if not isinstance(possible_agg.lineage, (AggregateWrapper, Function)):
-                continue
             agg_parents: List[Concept] = resolve_function_parent_concepts(
                 possible_agg
             )
@@ -60,7 +61,16 @@
                 set([x.address for x in parent_concepts])
            ):
                output_concepts.append(possible_agg)
-
+                logger.info(
+                    f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+                )
+            elif Grain(components=agg_parents) == Grain(components=parent_concepts):
+                extra = [x for x in agg_parents if x.address not in parent_concepts]
+                parent_concepts += extra
+                output_concepts.append(possible_agg)
+                logger.info(
+                    f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+                )
 
     if parent_concepts:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} fetching group node parents {LooseConceptList(concepts=parent_concepts)}"
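
Note: the new elif branch lets an optional aggregate ride along in the same group node when its parent concepts resolve to the same grain as the node being built, pulling in any parents it still needs. A rough sketch of that equivalence check, comparing grains as address sets (an assumption about how Grain equality behaves) with placeholder addresses:

def same_grain(a: list[str], b: list[str]) -> bool:
    # assumption: Grain equality reduces to comparing component addresses as sets
    return set(a) == set(b)


parent_concepts = ["order.customer_id", "order.date"]
agg_parents = ["order.date", "order.customer_id"]

if same_grain(agg_parents, parent_concepts):
    # safe to compute the optional aggregate in the same group node; add any
    # parents it needs that the node does not already carry
    parent_concepts += [x for x in agg_parents if x not in parent_concepts]
print(parent_concepts)  # ['order.customer_id', 'order.date']
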

trilogy/core/processing/node_generators/node_merge_node.py
@@ -148,7 +148,6 @@ def detect_ambiguity_and_raise(
         common = common.intersection(ja)
         if all(set(ja).issubset(y) for y in reduced_concept_sets):
             final_candidates.append(ja)
-
     if not final_candidates:
         filtered_paths = [x.difference(common) for x in reduced_concept_sets]
         raise AmbiguousRelationshipResolutionException(

trilogy/core/processing/node_generators/select_merge_node.py
@@ -21,7 +21,6 @@ from trilogy.constants import logger
 from trilogy.core.processing.utility import padding
 from trilogy.core.enums import PurposeLineage
 
-
 LOGGER_PREFIX = "[GEN_ROOT_MERGE_NODE]"
 
 
@@ -52,18 +51,19 @@ def get_graph_grain_length(g: nx.DiGraph) -> dict[str, int]:
 def create_pruned_concept_graph(
     g: nx.DiGraph, all_concepts: List[Concept], accept_partial: bool = False
 ) -> nx.DiGraph:
+    orig_g = g
     g = g.copy()
     target_addresses = set([c.address for c in all_concepts])
-    concepts: dict[str, Concept] = nx.get_node_attributes(g, "concept")
-    datasources: dict[str, Datasource] = nx.get_node_attributes(g, "datasource")
+    concepts: dict[str, Concept] = nx.get_node_attributes(orig_g, "concept")
+    datasources: dict[str, Datasource] = nx.get_node_attributes(orig_g, "datasource")
     relevant_concepts_pre = {
         n: x.address
         for n in g.nodes()
         # filter out synonyms
         if (x := concepts.get(n, None)) and x.address in target_addresses
     }
-    relevant_concepts = list(relevant_concepts_pre.keys())
-    relevent_datasets = []
+    relevant_concepts: list[str] = list(relevant_concepts_pre.keys())
+    relevent_datasets: list[str] = []
     if not accept_partial:
         partial = {}
         for node in g.nodes:
@@ -95,15 +95,22 @@
            ]
            if actual_neighbors:
                relevent_datasets.append(n)
-    for n in g.nodes():
-        if n.startswith("c~") and n not in relevant_concepts:
-            neighbor_count = 0
-            for x in nx.all_neighbors(g, n):
-                if x in relevent_datasets:
-                    neighbor_count += 1
-            if neighbor_count > 1:
-                relevant_concepts.append(concepts.get(n))
 
+    # for injecting extra join concepts that are shared between datasets
+    # use the original graph, pre-partial pruning
+    for n in orig_g.nodes:
+        # readd ignoring grain
+        # we want to join inclusive of all concepts
+        roots: dict[str, set[str]] = {}
+        if n.startswith("c~") and n not in relevant_concepts:
+            root = n.split("@")[0]
+            neighbors = roots.get(root, set())
+            for neighbor in nx.all_neighbors(orig_g, n):
+                if neighbor in relevent_datasets:
+                    neighbors.add(neighbor)
+            if len(neighbors) > 1:
+                relevant_concepts.append(n)
+            roots[root] = set()
     g.remove_nodes_from(
         [
             n
@@ -111,6 +118,18 @@
            if n not in relevent_datasets and n not in relevant_concepts
         ]
     )
+
+    subgraphs = list(nx.connected_components(g.to_undirected()))
+    if not subgraphs:
+        return None
+    if subgraphs and len(subgraphs) != 1:
+        return None
+    # add back any relevant edges that might have been partially filtered
+    relevant = set(relevant_concepts + relevent_datasets)
+    for edge in orig_g.edges():
+        if edge[0] in relevant and edge[1] in relevant:
+            g.add_edge(edge[0], edge[1])
+
     return g
 
 
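
Note: create_pruned_concept_graph now returns None itself when the pruned graph does not collapse to a single connected component, instead of leaving that check to the caller (see the gen_select_merge_node hunk below). A small networkx example of the check, with made-up node names:

import networkx as nx

g = nx.DiGraph()
g.add_edge("ds~orders", "c~order_id")
g.add_edge("ds~customers", "c~customer_id")  # not connected to the orders subgraph

# a usable covering graph must form exactly one undirected component
components = list(nx.connected_components(g.to_undirected()))
if len(components) != 1:
    print(f"no single covering graph: {components}")
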
@@ -243,19 +262,15 @@ def gen_select_merge_node(
     )
     for attempt in [False, True]:
         pruned_concept_graph = create_pruned_concept_graph(g, non_constant, attempt)
-        subgraphs = list(nx.connected_components(pruned_concept_graph.to_undirected()))
-
-        if subgraphs and len(subgraphs) == 1:
+        if pruned_concept_graph:
            logger.info(
                f"{padding(depth)}{LOGGER_PREFIX} found covering graph w/ partial flag {attempt}"
            )
            break
-        if len(subgraphs) > 1:
-            # from trilogy.hooks.graph_hook import GraphHook
-            # GraphHook().query_graph_built(pruned_concept_graph.to_undirected(), highlight_nodes=[concept_to_node(c.with_default_grain()) for c in all_concepts if "__preql_internal" not in c.address])
-            # raise SyntaxError(f'Too many subgraphs found for {[c.address for c in all_concepts]}: got {subgraphs}')
+
+        if not pruned_concept_graph:
            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} Too many subgraphs found for {[c.address for c in non_constant]}: got {subgraphs}'"
+                f"{padding(depth)}{LOGGER_PREFIX} no covering graph found {attempt}"
            )
            return None
 

trilogy/core/processing/utility.py
@@ -199,7 +199,6 @@ def get_node_joins(
     identifier_map: dict[str, Datasource | QueryDatasource] = {
         x.identifier: x for x in datasources
     }
-
     grain_pseudonyms: set[str] = set()
     for g in grain:
         env_lookup = environment.concepts[g.address]

trilogy/parsing/parse_engine.py
@@ -814,14 +814,19 @@ class ParseToObjects(Transformer):
            raise ImportError(f"Unable to import file {target}, parsing error: {e}")
 
         for _, concept in nparser.environment.concepts.items():
-            self.environment.add_concept(concept.with_namespace(alias))
+            self.environment.add_concept(
+                concept.with_namespace(alias), _ignore_cache=True
+            )
 
         for _, datasource in nparser.environment.datasources.items():
-            self.environment.add_datasource(datasource.with_namespace(alias))
+            self.environment.add_datasource(
+                datasource.with_namespace(alias), _ignore_cache=True
+            )
         imps = ImportStatement(
             alias=alias, path=Path(args[0]), environment=nparser.environment
         )
         self.environment.imports[alias] = imps
+        self.environment.gen_concept_list_caches()
         return imps
 
     @v_args(meta=True)