pytrilogy 0.0.2.18__py3-none-any.whl → 0.0.2.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pytrilogy might be problematic.

pytrilogy-0.0.2.19.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pytrilogy
-Version: 0.0.2.18
+Version: 0.0.2.19
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
pytrilogy-0.0.2.19.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-trilogy/__init__.py,sha256=NVclSieaZqXKRfCCzUhXoqSrNUdoiMn3ytKw0jCWj7A,291
+trilogy/__init__.py,sha256=3gRtKqbvnX1RJDJWia2dlhjAU87WHpfzoZZM3KSaFaw,291
 trilogy/compiler.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/constants.py,sha256=pZkOneh_65f9Ua6NICu1bHAFAbmQxmiXRXS7tsmCWbQ,1235
 trilogy/engine.py,sha256=R5ubIxYyrxRExz07aZCUfrTsoXCHQ8DKFTDsobXdWdA,1102
@@ -27,17 +27,17 @@ trilogy/core/optimizations/predicate_pushdown.py,sha256=1l9WnFOSv79e341typG3tTdk
 trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/core/processing/concept_strategies_v3.py,sha256=DO9gybVLku8GEkO3uNPaCeqhalnufsjYYbvDs-gkwNc,35295
 trilogy/core/processing/graph_utils.py,sha256=aq-kqk4Iado2HywDxWEejWc-7PGO6Oa-ZQLAM6XWPHw,1199
-trilogy/core/processing/utility.py,sha256=plpaAmssWjcxPee8_J4hleazTzQIBL6mBLJ33FKfwOM,19421
+trilogy/core/processing/utility.py,sha256=v06sqXpnuYct_MMZXxEaiP0WwkeblWpO81QG1Ns3yGc,19420
 trilogy/core/processing/node_generators/__init__.py,sha256=-mzYkRsaRNa_dfTckYkKVFSR8h8a3ihEiPJDU_tAmDo,672
 trilogy/core/processing/node_generators/basic_node.py,sha256=WQNgJ1MwrMS_BQ-b3XwGGB6eToDykelAVj_fesJuqe0,2069
 trilogy/core/processing/node_generators/common.py,sha256=LwDgPlhWeuw0t07f3kX9IE5LXBdZhXfh-aY0XGk50ak,8946
 trilogy/core/processing/node_generators/filter_node.py,sha256=Vz9Rb67e1dfZgnliekwwLeDPVkthMbdrnrKRdz7J1ik,7654
-trilogy/core/processing/node_generators/group_node.py,sha256=Dn9vEY-WPFHNN-LtXfgWiHIXspzHDKfkKL5a2KE2gD0,4252
+trilogy/core/processing/node_generators/group_node.py,sha256=r54IVEhXW-tzod6uEHIQObrxgQt6aNySk5emWkWyqCU,4938
 trilogy/core/processing/node_generators/group_to_node.py,sha256=R9i_wHipxjXJyfYEwfeTw2EPpuanXVA327XyfcP2tBg,2537
 trilogy/core/processing/node_generators/multiselect_node.py,sha256=_KO9lqzHQoy4VAviO0ttQlmK0tjaqrJj4SJPhmoIYm8,6229
-trilogy/core/processing/node_generators/node_merge_node.py,sha256=yRDfY8muZN7G2vsdYXF2X1iqbQ2zDUNGlxvSIyKVoWU,13512
+trilogy/core/processing/node_generators/node_merge_node.py,sha256=4aoSkynWYcKAxeN4fU5jnCdxausa5rNgFokoVhPXI80,13511
 trilogy/core/processing/node_generators/rowset_node.py,sha256=gU_ybfYXO9tZqHjUSABIioVpb8AWtITpegj3IGSf2GI,4587
-trilogy/core/processing/node_generators/select_merge_node.py,sha256=ipSxw1Oqk-hVVGhPhZlvRbptC0Vpwh52hZ7z8oOj2yk,10065
+trilogy/core/processing/node_generators/select_merge_node.py,sha256=MKjlXqFBSin6cTnS6n5lEcNBJsMvSefDIXOwYNVbM0s,10371
 trilogy/core/processing/node_generators/select_node.py,sha256=vUg3gXHGvagdbniIAE7DdqJcQ0V1VAfHtTrw3edYPso,1734
 trilogy/core/processing/node_generators/unnest_node.py,sha256=cZ26CN338CBnd6asML1OBUtNcDzmNlFpY0Vnade4yrc,2256
 trilogy/core/processing/node_generators/window_node.py,sha256=jy3FF8uN0VA7yyrBeR40B9CAqR_5qBP4PiS6Gr-f-7w,2590
@@ -75,9 +75,9 @@ trilogy/parsing/render.py,sha256=8yxerPAi4AhlhPBlAfbYbOM3F9rz6HzpWVEWPtK2VEg,123
 trilogy/parsing/trilogy.lark,sha256=0JAvQBACFNL-X61I0tB_0QPZgsguZgerfHBv903oKh0,11623
 trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/scripts/trilogy.py,sha256=PHxvv6f2ODv0esyyhWxlARgra8dVhqQhYl0lTrSyVNo,3729
-pytrilogy-0.0.2.18.dist-info/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
-pytrilogy-0.0.2.18.dist-info/METADATA,sha256=erqF5E59Qz6Xr2ZKiBKejkmQUFMGpDxK9NerqOoT6K0,8132
-pytrilogy-0.0.2.18.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-pytrilogy-0.0.2.18.dist-info/entry_points.txt,sha256=0petKryjvvtEfTlbZC1AuMFumH_WQ9v8A19LvoS6G6c,54
-pytrilogy-0.0.2.18.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
-pytrilogy-0.0.2.18.dist-info/RECORD,,
+pytrilogy-0.0.2.19.dist-info/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
+pytrilogy-0.0.2.19.dist-info/METADATA,sha256=BgEvyZ13aahBi8bULS0lT9Rx5ixjpZ99ZbHeC8mg5Mw,8132
+pytrilogy-0.0.2.19.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+pytrilogy-0.0.2.19.dist-info/entry_points.txt,sha256=0petKryjvvtEfTlbZC1AuMFumH_WQ9v8A19LvoS6G6c,54
+pytrilogy-0.0.2.19.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
+pytrilogy-0.0.2.19.dist-info/RECORD,,
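Each RECORD row above has the form path,sha256=<digest>,<size-in-bytes>, where the digest is the unpadded urlsafe-base64 SHA-256 of the file contents; the only hashes that change are for the files this release touched. As a sanity check, here is a short standard-library sketch that reproduces a row (the path is illustrative; run it from the root of an unpacked 0.0.2.19 wheel):

import base64
import hashlib
from pathlib import Path

def record_row(path: str) -> str:
    # Wheel RECORD digests are urlsafe-base64 SHA-256 with "=" padding stripped.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

# Should reproduce the trilogy/__init__.py row above, including the 291-byte size.
print(record_row("trilogy/__init__.py"))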
trilogy/__init__.py CHANGED
@@ -4,6 +4,6 @@ from trilogy.executor import Executor
 from trilogy.parser import parse
 from trilogy.constants import CONFIG
 
-__version__ = "0.0.2.18"
+__version__ = "0.0.2.19"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/core/processing/node_generators/group_node.py CHANGED
@@ -5,6 +5,7 @@ from trilogy.core.models import (
     WhereClause,
     Function,
     AggregateWrapper,
+    Grain,
 )
 from trilogy.utility import unique
 from trilogy.core.processing.nodes import GroupNode, StrategyNode, History
@@ -50,9 +51,9 @@ def gen_group_node(
         parent_concepts += grain_components
         output_concepts += grain_components
     for possible_agg in local_optional:
+        if not isinstance(possible_agg.lineage, (AggregateWrapper, Function)):
+            continue
         if possible_agg.grain and possible_agg.grain == concept.grain:
-            if not isinstance(possible_agg.lineage, (AggregateWrapper, Function)):
-                continue
            agg_parents: List[Concept] = resolve_function_parent_concepts(
                possible_agg
            )
@@ -60,7 +61,16 @@ def gen_group_node(
                set([x.address for x in parent_concepts])
            ):
                output_concepts.append(possible_agg)
-
+                logger.info(
+                    f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+                )
+            elif Grain(components=agg_parents) == Grain(components=parent_concepts):
+                extra = [x for x in agg_parents if x.address not in parent_concepts]
+                parent_concepts += extra
+                output_concepts.append(possible_agg)
+                logger.info(
+                    f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+                )
     if parent_concepts:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} fetching group node parents {LooseConceptList(concepts=parent_concepts)}"
trilogy/core/processing/node_generators/node_merge_node.py CHANGED
@@ -148,7 +148,6 @@ def detect_ambiguity_and_raise(
         common = common.intersection(ja)
         if all(set(ja).issubset(y) for y in reduced_concept_sets):
             final_candidates.append(ja)
-
     if not final_candidates:
         filtered_paths = [x.difference(common) for x in reduced_concept_sets]
         raise AmbiguousRelationshipResolutionException(
trilogy/core/processing/node_generators/select_merge_node.py CHANGED
@@ -21,7 +21,6 @@ from trilogy.constants import logger
 from trilogy.core.processing.utility import padding
 from trilogy.core.enums import PurposeLineage
 
-
 LOGGER_PREFIX = "[GEN_ROOT_MERGE_NODE]"
 
 
@@ -52,18 +51,19 @@ def get_graph_grain_length(g: nx.DiGraph) -> dict[str, int]:
 def create_pruned_concept_graph(
     g: nx.DiGraph, all_concepts: List[Concept], accept_partial: bool = False
 ) -> nx.DiGraph:
+    orig_g = g
     g = g.copy()
     target_addresses = set([c.address for c in all_concepts])
-    concepts: dict[str, Concept] = nx.get_node_attributes(g, "concept")
-    datasources: dict[str, Datasource] = nx.get_node_attributes(g, "datasource")
+    concepts: dict[str, Concept] = nx.get_node_attributes(orig_g, "concept")
+    datasources: dict[str, Datasource] = nx.get_node_attributes(orig_g, "datasource")
     relevant_concepts_pre = {
         n: x.address
         for n in g.nodes()
         # filter out synonyms
         if (x := concepts.get(n, None)) and x.address in target_addresses
     }
-    relevant_concepts = list(relevant_concepts_pre.keys())
-    relevent_datasets = []
+    relevant_concepts: list[str] = list(relevant_concepts_pre.keys())
+    relevent_datasets: list[str] = []
     if not accept_partial:
         partial = {}
         for node in g.nodes:
@@ -95,15 +95,22 @@
             ]
             if actual_neighbors:
                 relevent_datasets.append(n)
-    for n in g.nodes():
-        if n.startswith("c~") and n not in relevant_concepts:
-            neighbor_count = 0
-            for x in nx.all_neighbors(g, n):
-                if x in relevent_datasets:
-                    neighbor_count += 1
-            if neighbor_count > 1:
-                relevant_concepts.append(concepts.get(n))
 
+    # for injecting extra join concepts that are shared between datasets
+    # use the original graph, pre-partial pruning
+    for n in orig_g.nodes:
+        # readd ignoring grain
+        # we want to join inclusive of all concepts
+        roots: dict[str, set[str]] = {}
+        if n.startswith("c~") and n not in relevant_concepts:
+            root = n.split("@")[0]
+            neighbors = roots.get(root, set())
+            for neighbor in nx.all_neighbors(orig_g, n):
+                if neighbor in relevent_datasets:
+                    neighbors.add(neighbor)
+            if len(neighbors) > 1:
+                relevant_concepts.append(n)
+                roots[root] = set()
     g.remove_nodes_from(
         [
             n
@@ -111,6 +118,18 @@
             if n not in relevent_datasets and n not in relevant_concepts
         ]
     )
+
+    subgraphs = list(nx.connected_components(g.to_undirected()))
+    if not subgraphs:
+        return None
+    if subgraphs and len(subgraphs) != 1:
+        return None
+    # add back any relevant edges that might have been partially filtered
+    relevant = set(relevant_concepts + relevent_datasets)
+    for edge in orig_g.edges():
+        if edge[0] in relevant and edge[1] in relevant:
+            g.add_edge(edge[0], edge[1])
+
     return g
 
 
@@ -243,19 +262,15 @@
     )
     for attempt in [False, True]:
         pruned_concept_graph = create_pruned_concept_graph(g, non_constant, attempt)
-        subgraphs = list(nx.connected_components(pruned_concept_graph.to_undirected()))
-
-        if subgraphs and len(subgraphs) == 1:
+        if pruned_concept_graph:
             logger.info(
                 f"{padding(depth)}{LOGGER_PREFIX} found covering graph w/ partial flag {attempt}"
             )
             break
-        if len(subgraphs) > 1:
-            # from trilogy.hooks.graph_hook import GraphHook
-            # GraphHook().query_graph_built(pruned_concept_graph.to_undirected(), highlight_nodes=[concept_to_node(c.with_default_grain()) for c in all_concepts if "__preql_internal" not in c.address])
-            # raise SyntaxError(f'Too many subgraphs found for {[c.address for c in all_concepts]}: got {subgraphs}')
+
+    if not pruned_concept_graph:
         logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} Too many subgraphs found for {[c.address for c in non_constant]}: got {subgraphs}'"
+            f"{padding(depth)}{LOGGER_PREFIX} no covering graph found {attempt}"
         )
         return None
 
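The connectivity test that gen_select_merge_node previously ran inline has moved into create_pruned_concept_graph, which now returns None unless the pruned graph forms a single connected component, i.e. every requested concept is reachable from every other via shared datasources. A minimal networkx sketch of that check (node names are illustrative, not trilogy's real node labels):

import networkx as nx

g = nx.DiGraph()
g.add_edge("ds~orders", "c~order.id")
g.add_edge("ds~orders", "c~order.customer_id")
g.add_edge("ds~customers", "c~customer.id")  # no join path back to orders

# connected_components requires an undirected view of the graph.
subgraphs = list(nx.connected_components(g.to_undirected()))
# Two components: no single covering graph exists, so the generator gives up
# (returns None) rather than guessing a cross join.
assert len(subgraphs) == 2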
trilogy/core/processing/utility.py CHANGED
@@ -199,7 +199,6 @@ def get_node_joins(
     identifier_map: dict[str, Datasource | QueryDatasource] = {
         x.identifier: x for x in datasources
     }
-
     grain_pseudonyms: set[str] = set()
     for g in grain:
         env_lookup = environment.concepts[g.address]