pytrilogy 0.0.1.102__tar.gz → 0.0.1.103__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.



Files changed (99)
  1. {pytrilogy-0.0.1.102/pytrilogy.egg-info → pytrilogy-0.0.1.103}/PKG-INFO +2 -2
  2. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/README.md +1 -1
  3. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103/pytrilogy.egg-info}/PKG-INFO +2 -2
  4. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_models.py +19 -5
  5. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/__init__.py +1 -1
  6. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/env_processor.py +5 -1
  7. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/models.py +7 -3
  8. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/concept_strategies_v3.py +1 -1
  9. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/rowset_node.py +10 -6
  10. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/select_node.py +77 -63
  11. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/scripts/trilogy.py +1 -1
  12. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/LICENSE.md +0 -0
  13. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pyproject.toml +0 -0
  14. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pytrilogy.egg-info/SOURCES.txt +0 -0
  15. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pytrilogy.egg-info/dependency_links.txt +0 -0
  16. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pytrilogy.egg-info/entry_points.txt +0 -0
  17. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pytrilogy.egg-info/requires.txt +0 -0
  18. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/pytrilogy.egg-info/top_level.txt +0 -0
  19. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/setup.cfg +0 -0
  20. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/setup.py +0 -0
  21. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_declarations.py +0 -0
  22. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_derived_concepts.py +0 -0
  23. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_discovery_nodes.py +0 -0
  24. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_environment.py +0 -0
  25. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_functions.py +0 -0
  26. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_imports.py +0 -0
  27. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_metadata.py +0 -0
  28. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_multi_join_assignments.py +0 -0
  29. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_parsing.py +0 -0
  30. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_partial_handling.py +0 -0
  31. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_query_processing.py +0 -0
  32. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_select.py +0 -0
  33. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_statements.py +0 -0
  34. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_undefined_concept.py +0 -0
  35. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_where_clause.py +0 -0
  36. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/compiler.py +0 -0
  37. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/constants.py +0 -0
  38. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/__init__.py +0 -0
  39. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/constants.py +0 -0
  40. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/enums.py +0 -0
  41. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/environment_helpers.py +0 -0
  42. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/ergonomics.py +0 -0
  43. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/exceptions.py +0 -0
  44. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/functions.py +0 -0
  45. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/graph_models.py +0 -0
  46. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/internal.py +0 -0
  47. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/__init__.py +0 -0
  48. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/graph_utils.py +0 -0
  49. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/__init__.py +0 -0
  50. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  51. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/common.py +0 -0
  52. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/concept_merge.py +0 -0
  53. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  54. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/group_node.py +0 -0
  55. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  56. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/merge_node.py +0 -0
  57. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  58. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  59. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/window_node.py +0 -0
  60. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/__init__.py +0 -0
  61. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/base_node.py +0 -0
  62. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/filter_node.py +0 -0
  63. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/group_node.py +0 -0
  64. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/merge_node.py +0 -0
  65. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  66. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  67. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/nodes/window_node.py +0 -0
  68. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/utility.py +0 -0
  69. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/query_processor.py +0 -0
  70. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/__init__.py +0 -0
  71. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/base.py +0 -0
  72. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/bigquery.py +0 -0
  73. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/common.py +0 -0
  74. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/config.py +0 -0
  75. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/duckdb.py +0 -0
  76. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/enums.py +0 -0
  77. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/postgres.py +0 -0
  78. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/presto.py +0 -0
  79. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/snowflake.py +0 -0
  80. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/dialect/sql_server.py +0 -0
  81. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/docs/__init__.py +0 -0
  82. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/engine.py +0 -0
  83. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/executor.py +0 -0
  84. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/hooks/__init__.py +0 -0
  85. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/hooks/base_hook.py +0 -0
  86. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/hooks/graph_hook.py +0 -0
  87. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/hooks/query_debugger.py +0 -0
  88. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/metadata/__init__.py +0 -0
  89. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parser.py +0 -0
  90. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/__init__.py +0 -0
  91. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/common.py +0 -0
  92. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/config.py +0 -0
  93. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/exceptions.py +0 -0
  94. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/helpers.py +0 -0
  95. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/parse_engine.py +0 -0
  96. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/parsing/render.py +0 -0
  97. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/py.typed +0 -0
  98. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/scripts/__init__.py +0 -0
  99. {pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/utility.py +0 -0

{pytrilogy-0.0.1.102/pytrilogy.egg-info → pytrilogy-0.0.1.103}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pytrilogy
-Version: 0.0.1.102
+Version: 0.0.1.103
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
@@ -27,7 +27,7 @@ Requires-Dist: sqlalchemy-bigquery; extra == "bigquery"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-sqlalchemy; extra == "snowflake"
 
-##Trilogy
+## Trilogy
 [![Website](https://img.shields.io/badge/INTRO-WEB-orange?)](https://trilogydata.dev/)
 [![Discord](https://img.shields.io/badge/DISCORD-CHAT-red?logo=discord)](https://discord.gg/Z4QSSuqGEd)
 

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/README.md
@@ -1,4 +1,4 @@
-##Trilogy
+## Trilogy
 [![Website](https://img.shields.io/badge/INTRO-WEB-orange?)](https://trilogydata.dev/)
 [![Discord](https://img.shields.io/badge/DISCORD-CHAT-red?logo=discord)](https://discord.gg/Z4QSSuqGEd)
 

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103/pytrilogy.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pytrilogy
-Version: 0.0.1.102
+Version: 0.0.1.103
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
@@ -27,7 +27,7 @@ Requires-Dist: sqlalchemy-bigquery; extra == "bigquery"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-sqlalchemy; extra == "snowflake"
 
-##Trilogy
+## Trilogy
 [![Website](https://img.shields.io/badge/INTRO-WEB-orange?)](https://trilogydata.dev/)
 [![Discord](https://img.shields.io/badge/DISCORD-CHAT-red?logo=discord)](https://discord.gg/Z4QSSuqGEd)
 

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/tests/test_models.py
@@ -1,4 +1,4 @@
-from trilogy.core.enums import BooleanOperator, Purpose, JoinType
+from trilogy.core.enums import BooleanOperator, Purpose, JoinType, ComparisonOperator
 from trilogy.core.models import (
     CTE,
     Grain,
@@ -9,6 +9,7 @@ from trilogy.core.models import (
     Address,
     UndefinedConcept,
     BaseJoin,
+    Comparison,
 )
 
 
@@ -73,11 +74,17 @@ def test_conditional(test_environment, test_environment_graph):
     condition_b = Conditional(
         left=test_concept, right=test_concept, operator=BooleanOperator.AND
     )
-
     merged = condition_a + condition_b
-    assert merged.left == condition_a
-    assert merged.right == condition_b
-    assert merged.operator == BooleanOperator.AND
+    assert merged == condition_a
+
+    test_concept_two = list(test_environment.concepts.values())[-2]
+    condition_c = Conditional(
+        left=test_concept, right=test_concept_two, operator=BooleanOperator.AND
+    )
+    merged_two = condition_a + condition_c
+    assert merged_two.left == condition_a
+    assert merged_two.right == condition_c
+    assert merged_two.operator == BooleanOperator.AND
 
 
 def test_grain(test_environment):
@@ -177,3 +184,10 @@ def test_base_join(test_environment: Environment):
         exc3 = exc4
         pass
     assert isinstance(exc3, SyntaxError)
+
+
+def test_comparison():
+    try:
+        Comparison(left=1, right="abc", operator=ComparisonOperator.EQ)
+    except Exception as exc:
+        assert isinstance(exc, SyntaxError)

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/__init__.py
@@ -3,6 +3,6 @@ from trilogy.dialect.enums import Dialects
 from trilogy.executor import Executor
 from trilogy.parser import parse
 
-__version__ = "0.0.1.102"
+__version__ = "0.0.1.103"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment"]

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/env_processor.py
@@ -1,4 +1,8 @@
-from trilogy.core.graph_models import ReferenceGraph, concept_to_node, datasource_to_node
+from trilogy.core.graph_models import (
+    ReferenceGraph,
+    concept_to_node,
+    datasource_to_node,
+)
 from trilogy.core.models import Environment
 from trilogy.core.enums import PurposeLineage
 

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/models.py
@@ -338,7 +338,7 @@ class Concept(Namespaced, SelectGrain, BaseModel):
 
     def __eq__(self, other: object):
         if isinstance(other, str):
-            if self.address == str:
+            if self.address == other:
                 return True
         if not isinstance(other, Concept):
             return False
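
Note on the Concept.__eq__ change above: the old branch compared self.address to the type str, which is never true, so comparing a concept to its address string always fell through. A minimal standalone sketch of the corrected behaviour (the Addr class below is a hypothetical stand-in, not the library's Concept model):

class Addr:
    """Hypothetical stand-in for Concept, showing the corrected string comparison."""

    def __init__(self, address: str):
        self.address = address

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            # fixed branch: compare against the value, not the type `str`
            return self.address == other
        return isinstance(other, Addr) and self.address == other.address


assert Addr("orders.revenue") == "orders.revenue"
assert Addr("orders.revenue") != "orders.cost"
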
@@ -1887,7 +1887,9 @@ class QueryDatasource(BaseModel):
                 else None
             ),
             source_type=self.source_type,
-            partial_concepts=self.partial_concepts + other.partial_concepts,
+            partial_concepts=unique(
+                self.partial_concepts + other.partial_concepts, "address"
+            ),
             join_derived_concepts=self.join_derived_concepts,
             force_group=self.force_group,
         )
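
Note on the QueryDatasource merge above: the combined partial_concepts list is now routed through trilogy's unique helper keyed on "address", so a concept contributed by both sides appears only once. A rough standalone sketch of that dedup-by-attribute idea (unique_by_attribute below is illustrative, not the library's implementation):

from types import SimpleNamespace
from typing import Any, List


def unique_by_attribute(items: List[Any], attribute: str) -> List[Any]:
    # Keep the first item per attribute value, preserving input order.
    seen = set()
    out: List[Any] = []
    for item in items:
        key = getattr(item, attribute)
        if key not in seen:
            seen.add(key)
            out.append(item)
    return out


a = SimpleNamespace(address="ns.a")
b = SimpleNamespace(address="ns.b")
assert unique_by_attribute([a, b, a], "address") == [a, b]
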
@@ -2530,7 +2532,7 @@ class Comparison(Namespaced, SelectGrain, BaseModel):
 
     def __post_init__(self):
         if arg_to_datatype(self.left) != arg_to_datatype(self.right):
-            raise ValueError(
+            raise SyntaxError(
                 f"Cannot compare {self.left} and {self.right} of different types"
             )
 
@@ -2704,6 +2706,8 @@ class Conditional(Namespaced, SelectGrain, BaseModel):
     def __add__(self, other) -> "Conditional":
         if other is None:
             return self
+        elif str(other) == str(self):
+            return self
         elif isinstance(other, (Comparison, Conditional, Parenthetical)):
             return Conditional(left=self, right=other, operator=BooleanOperator.AND)
         raise ValueError(f"Cannot add {self.__class__} and {type(other)}")

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/concept_strategies_v3.py
@@ -318,7 +318,7 @@ def generate_node(
         )
     elif concept.derivation == PurposeLineage.ROOT:
         logger.info(
-            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} for {concept.address}, generating select node"
+            f"{depth_to_prefix(depth)}{LOGGER_PREFIX} for {concept.address}, generating select node with optional {[x.address for x in local_optional]}"
         )
         return gen_select_node(
             concept,

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/rowset_node.py
@@ -35,26 +35,30 @@ def gen_rowset_node(
     lineage: RowsetItem = concept.lineage
     rowset: RowsetDerivationStatement = lineage.rowset
     select: SelectStatement | MultiSelectStatement = lineage.rowset.select
+    if where := select.where_clause:
+        targets = select.output_components + where.conditional.concept_arguments
+    else:
+        targets = select.output_components
     node: StrategyNode = source_concepts(
-        mandatory_list=select.output_components,
+        mandatory_list=targets,
         environment=environment,
         g=g,
         depth=depth + 1,
         history=history,
     )
-    node.conditions = select.where_clause.conditional if select.where_clause else None
-    # rebuild any cached info with the new condition clause
-    node.rebuild_cache()
     if not node:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} Cannot generate rowset node for {concept}"
         )
         return None
+    node.conditions = select.where_clause.conditional if select.where_clause else None
+    # rebuild any cached info with the new condition clause
+    node.rebuild_cache()
     enrichment = set([x.address for x in local_optional])
     rowset_relevant = [
         x
         for x in rowset.derived_concepts
-        if x.address == concept.address or x.address in enrichment
+        # if x.address == concept.address or x.address in enrichment
     ]
     additional_relevant = [
         x for x in select.output_components if x.address in enrichment
@@ -68,7 +72,7 @@ def gen_rowset_node(
     for item in additional_relevant:
         node.partial_concepts.append(item)
 
-    # assume grain to be outoput of select
+    # assume grain to be output of select
     # but don't include anything aggregate at this point
     assert node.resolution_cache
     node.resolution_cache.grain = concept_list_to_grain(
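
Two behavioural points in the rowset change above: concepts referenced by the select's where clause are now part of the mandatory sourcing list, and the condition assignment plus cache rebuild happen only after the node lookup succeeds. A small illustrative sketch of the target-list branch (the dataclasses below are simplified stand-ins for the statement models, flattening where.conditional.concept_arguments into a plain list):

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Where:  # simplified stand-in for a where clause
    concept_arguments: List[str]


@dataclass
class Select:  # simplified stand-in for SelectStatement
    output_components: List[str]
    where_clause: Optional[Where] = None


def mandatory_targets(select: Select) -> List[str]:
    # Mirrors the new walrus branch: filter concepts must be sourced too.
    if where := select.where_clause:
        return select.output_components + where.concept_arguments
    return select.output_components


assert mandatory_targets(Select(["a", "b"])) == ["a", "b"]
assert mandatory_targets(Select(["a"], Where(["c"]))) == ["a", "c"]
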

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/core/processing/node_generators/select_node.py
@@ -50,26 +50,21 @@ def gen_select_node_from_table(
     candidates: dict[str, StrategyNode] = {}
     scores: dict[str, int] = {}
     # otherwise, we need to look for a table
+    nodes_to_find = [concept_to_node(x.with_default_grain()) for x in all_concepts]
     for datasource in environment.datasources.values():
         all_found = True
-        for raw_concept in all_concepts:
-            # look for connection to abstract grain
-            req_concept = raw_concept.with_default_grain()
-            # if we don't have a concept in the graph
-            # exit early
-            if concept_to_node(req_concept) not in g.nodes:
-                raise ValueError(concept_to_node(req_concept))
+        for idx, req_concept in enumerate(nodes_to_find):
             try:
                 path = nx.shortest_path(
                     g,
                     source=datasource_to_node(datasource),
-                    target=concept_to_node(req_concept),
+                    target=req_concept,
                 )
             except nx.NodeNotFound as e:
                 # just to provide better error
                 ncandidates = [
                     datasource_to_node(datasource),
-                    concept_to_node(req_concept),
+                    req_concept,
                 ]
                 for ncandidate in ncandidates:
                     try:
@@ -94,75 +89,77 @@ def gen_select_node_from_table(
             for node in path:
                 if g.nodes[node]["type"] == "datasource":
                     continue
-                if g.nodes[node]["concept"].address == raw_concept.address:
+                if g.nodes[node]["concept"].address == all_concepts[idx].address:
                     continue
                 all_found = False
                 break
 
-        if all_found:
-            partial_concepts = [
-                c.concept
-                for c in datasource.columns
-                if not c.is_complete and c.concept in all_lcl
-            ]
-            partial_lcl = LooseConceptList(concepts=partial_concepts)
-            if not accept_partial and target_concept in partial_lcl:
-                continue
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} target grain is {str(target_grain)}"
-            )
-            if target_grain and target_grain.issubset(datasource.grain):
+        if not all_found:
+            # skip to next node
+            continue
+        partial_concepts = [
+            c.concept
+            for c in datasource.columns
+            if not c.is_complete and c.concept in all_lcl
+        ]
+        partial_lcl = LooseConceptList(concepts=partial_concepts)
+        if not accept_partial and target_concept in partial_lcl:
+            continue
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} target grain is {str(target_grain)}"
+        )
+        if target_grain and target_grain.issubset(datasource.grain):
 
-                if all([x in all_lcl for x in target_grain.components]):
-                    force_group = False
-                # if we are not returning the grain
-                # we have to group
-                else:
-                    logger.info(
-                        f"{padding(depth)}{LOGGER_PREFIX} not all grain components are in output {str(all_lcl)}, group to actual grain"
-                    )
-                    force_group = True
-            elif all([x in all_lcl for x in datasource.grain.components]):
-                logger.info(
-                    f"{padding(depth)}{LOGGER_PREFIX} query output includes all grain components, no reason to group further"
-                )
+            if all([x in all_lcl for x in target_grain.components]):
                 force_group = False
+            # if we are not returning the grain
+            # we have to group
             else:
                 logger.info(
-                    f"{padding(depth)}{LOGGER_PREFIX} target grain is not subset of datasource grain {datasource.grain}, required to group"
+                    f"{padding(depth)}{LOGGER_PREFIX} not all grain components are in output {str(all_lcl)}, group to actual grain"
                 )
                 force_group = True
+        elif all([x in all_lcl for x in datasource.grain.components]):
+            logger.info(
+                f"{padding(depth)}{LOGGER_PREFIX} query output includes all grain components, no reason to group further"
+            )
+            force_group = False
+        else:
+            logger.info(
+                f"{padding(depth)}{LOGGER_PREFIX} target grain is not subset of datasource grain {datasource.grain}, required to group"
+            )
+            force_group = True
 
-            bcandidate: StrategyNode = SelectNode(
-                input_concepts=[c.concept for c in datasource.columns],
+        bcandidate: StrategyNode = SelectNode(
+            input_concepts=[c.concept for c in datasource.columns],
+            output_concepts=all_concepts,
+            environment=environment,
+            g=g,
+            parents=[],
+            depth=depth,
+            partial_concepts=[c for c in all_concepts if c in partial_lcl],
+            accept_partial=accept_partial,
+            datasource=datasource,
+            grain=Grain(components=all_concepts),
+        )
+        # we need to nest the group node one further
+        if force_group is True:
+            candidate: StrategyNode = GroupNode(
                 output_concepts=all_concepts,
+                input_concepts=all_concepts,
                 environment=environment,
                 g=g,
-                parents=[],
+                parents=[bcandidate],
                 depth=depth,
-                partial_concepts=[c for c in all_concepts if c in partial_lcl],
-                accept_partial=accept_partial,
-                datasource=datasource,
-                grain=Grain(components=all_concepts),
+                partial_concepts=bcandidate.partial_concepts,
             )
-            # we need to ntest the group node one further
-            if force_group is True:
-                candidate: StrategyNode = GroupNode(
-                    output_concepts=all_concepts,
-                    input_concepts=all_concepts,
-                    environment=environment,
-                    g=g,
-                    parents=[bcandidate],
-                    depth=depth,
-                    partial_concepts=bcandidate.partial_concepts,
-                )
-            else:
-                candidate = bcandidate
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} found select node with {datasource.identifier}, returning {candidate.output_lcl}"
-            )
-            candidates[datasource.identifier] = candidate
-            scores[datasource.identifier] = -len(partial_concepts)
+        else:
+            candidate = bcandidate
+        logger.info(
+            f"{padding(depth)}{LOGGER_PREFIX} found select node with {datasource.identifier}, returning {candidate.output_lcl}"
+        )
+        candidates[datasource.identifier] = candidate
+        scores[datasource.identifier] = -len(partial_concepts)
     if not candidates:
         return None
     final = max(candidates, key=lambda x: scores[x])
@@ -227,6 +224,21 @@ def gen_select_node(
         f"{padding(depth)}{LOGGER_PREFIX} looking for multiple sources that can satisfy"
     )
     all_found = False
+    unreachable: list[str] = []
+    # first pass
+    for opt_con in local_optional:
+        ds = gen_select_node_from_table(
+            concept,
+            [concept, opt_con],
+            g=g,
+            environment=environment,
+            depth=depth + 1,
+            accept_partial=accept_partial,
+            target_grain=Grain(components=all_concepts),
+        )
+        if not ds:
+            unreachable.append(opt_con.address)
+    # actual search
     for x in reversed(range(1, len(local_optional) + 1)):
         if all_found:
             break
@@ -234,7 +246,9 @@ def gen_select_node(
            if all_found:
                break
            # filter to just the original ones we need to get
-            local_combo = [x for x in combo if x not in found]
+            local_combo = [
+                x for x in combo if x not in found and x.address not in unreachable
+            ]
            # skip if nothing new in this combo
            if not local_combo:
                continue
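
The first pass added above probes each optional concept pairwise with the target and records addresses that no single datasource can reach, so the combination search that follows skips them instead of retrying doomed combinations. A standalone sketch of that pruning idea (reachable is a hypothetical callback standing in for gen_select_node_from_table):

from itertools import combinations
from typing import Callable, Iterator, List, Sequence


def prune_and_search(
    target: str,
    optional: Sequence[str],
    reachable: Callable[[str, str], bool],
) -> Iterator[List[str]]:
    # First pass: probe each optional concept once against the target.
    unreachable = {opt for opt in optional if not reachable(target, opt)}
    # Actual search: enumerate combinations, dropping unreachable members.
    for size in reversed(range(1, len(optional) + 1)):
        for combo in combinations(optional, size):
            local = [c for c in combo if c not in unreachable]
            if local:
                yield local


combos = list(prune_and_search("t", ["a", "b", "c"], lambda t, o: o != "c"))
assert all("c" not in combo for combo in combos)
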

{pytrilogy-0.0.1.102 → pytrilogy-0.0.1.103}/trilogy/scripts/trilogy.py
@@ -115,7 +115,7 @@ def run(ctx, input, dialect: str, conn_args):
 
         print_tabulate(results, tabulate.tabulate)
     except ImportError:
-        print('Install tabulate (pip install tabulate) for a prettier output')
+        print("Install tabulate (pip install tabulate) for a prettier output")
         print(", ".join(results.keys()))
         for row in results:
             print(row)