pytrilogy 0.0.3.103__py3-none-any.whl → 0.0.3.104__py3-none-any.whl

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.


Files changed (27)
  1. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/METADATA +1 -1
  2. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/RECORD +27 -26
  3. trilogy/__init__.py +1 -1
  4. trilogy/constants.py +1 -1
  5. trilogy/core/models/execute.py +1 -6
  6. trilogy/core/optimizations/predicate_pushdown.py +9 -1
  7. trilogy/core/processing/concept_strategies_v3.py +35 -14
  8. trilogy/core/processing/discovery_node_factory.py +6 -1
  9. trilogy/core/processing/discovery_utility.py +163 -14
  10. trilogy/core/processing/node_generators/basic_node.py +1 -0
  11. trilogy/core/processing/node_generators/common.py +1 -0
  12. trilogy/core/processing/node_generators/filter_node.py +0 -10
  13. trilogy/core/processing/node_generators/group_node.py +36 -0
  14. trilogy/core/processing/node_generators/multiselect_node.py +1 -1
  15. trilogy/core/processing/node_generators/node_merge_node.py +2 -6
  16. trilogy/core/processing/node_generators/rowset_node.py +1 -1
  17. trilogy/core/processing/nodes/base_node.py +13 -2
  18. trilogy/core/processing/nodes/group_node.py +9 -91
  19. trilogy/core/processing/nodes/merge_node.py +9 -0
  20. trilogy/core/processing/utility.py +8 -0
  21. trilogy/dialect/base.py +3 -0
  22. trilogy/std/color.preql +3 -0
  23. trilogy/std/display.preql +3 -3
  24. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/WHEEL +0 -0
  25. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/entry_points.txt +0 -0
  26. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/licenses/LICENSE.md +0 -0
  27. {pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/top_level.txt +0 -0
{pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pytrilogy
- Version: 0.0.3.103
+ Version: 0.0.3.104
  Summary: Declarative, typed query language that compiles to SQL.
  Home-page:
  Author:
{pytrilogy-0.0.3.103.dist-info → pytrilogy-0.0.3.104.dist-info}/RECORD CHANGED
@@ -1,6 +1,6 @@
- pytrilogy-0.0.3.103.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
- trilogy/__init__.py,sha256=lBanLP2CsDaPdJvJ3K68ncf8sO3sMfgsapECGG5J4fk,304
- trilogy/constants.py,sha256=ohmro6so7PPNp2ruWQKVc0ijjXYPOyRrxB9LI8dr3TU,1746
+ pytrilogy-0.0.3.104.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
+ trilogy/__init__.py,sha256=HyZF9WId40s9G3BjFS2OBMeHI7XNeE-YU1cFLvNOSWk,304
+ trilogy/constants.py,sha256=g_zkVCNjGop6coZ1kM8eXXAzCnUN22ldx3TYFz0E9sc,1747
  trilogy/engine.py,sha256=3MiADf5MKcmxqiHBuRqiYdsXiLj7oitDfVvXvHrfjkA,2178
  trilogy/executor.py,sha256=KgCAQhHPT-j0rPkBbALX0f84W9-Q-bkjHayGuavg99w,16490
  trilogy/parser.py,sha256=o4cfk3j3yhUFoiDKq9ZX_GjBF3dKhDjXEwb63rcBkBM,293
@@ -28,30 +28,30 @@ trilogy/core/models/build_environment.py,sha256=mpx7MKGc60fnZLVdeLi2YSREy7eQbQYy
  trilogy/core/models/core.py,sha256=iT9WdZoiXeglmUHWn6bZyXCTBpkApTGPKtNm_Mhbu_g,12987
  trilogy/core/models/datasource.py,sha256=wogTevZ-9CyUW2a8gjzqMCieircxi-J5lkI7EOAZnck,9596
  trilogy/core/models/environment.py,sha256=hwTIRnJgaHUdCYof7U5A9NPitGZ2s9yxqiW5O2SaJ9Y,28759
- trilogy/core/models/execute.py,sha256=lsNzNjS3nZvoW5CHjYwxDTwBe502NZyytpK1eq8CwW4,42357
+ trilogy/core/models/execute.py,sha256=pdL3voYB4dCQR_KMHwFaofP3ZpRbALRC2ELHueWyTko,42191
  trilogy/core/optimizations/__init__.py,sha256=yspWc25M5SgAuvXYoSt5J8atyPbDlOfsKjIo5yGD9s4,368
  trilogy/core/optimizations/base_optimization.py,sha256=gzDOKImoFn36k7XBD3ysEYDnbnb6vdVIztUfFQZsGnM,513
  trilogy/core/optimizations/hide_unused_concept.py,sha256=DbsP8NqQOxmPv9omDOoFNPUGObUkqsRRNrr5d1xDxx4,1962
  trilogy/core/optimizations/inline_datasource.py,sha256=2sWNRpoRInnTgo9wExVT_r9RfLAQHI57reEV5cGHUcg,4329
- trilogy/core/optimizations/predicate_pushdown.py,sha256=g4AYE8Aw_iMlAh68TjNXGP754NTurrDduFECkUjoBnc,9399
+ trilogy/core/optimizations/predicate_pushdown.py,sha256=5ubatgq1IwWQ4L2FDt4--y168YLuGP-vwqH0m8IeTIw,9786
  trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- trilogy/core/processing/concept_strategies_v3.py,sha256=tvSN_aiqb1H7LkTl96vj7YK_DKcq_1nDdRJ69wZCLc8,22158
- trilogy/core/processing/discovery_node_factory.py,sha256=r1JAnVhnB9YHEB1TW3racNH9mJvXjKRPZjzZrXsuiqg,15348
- trilogy/core/processing/discovery_utility.py,sha256=Xntgug6VnEF96uw5Zwen1qMEUwKjqrm_ZDUr4i4tc1U,5595
+ trilogy/core/processing/concept_strategies_v3.py,sha256=AcMU1d5uCo8I1PFCkBtmcC6iFmM9vN6xSdKxSVMGfpA,23080
+ trilogy/core/processing/discovery_node_factory.py,sha256=p23jiiHyhrW-Q8ndbnRlqMHJKT8ZqPOA89SzE4xaFFo,15445
+ trilogy/core/processing/discovery_utility.py,sha256=wIuLsE6yuVykeYZdIqRSagivDNU3-ooiS7z6in4yqho,11518
  trilogy/core/processing/discovery_validation.py,sha256=eZ4HfHMpqZLI8MGG2jez8arS8THs6ceuVrQFIY6gXrU,5364
  trilogy/core/processing/graph_utils.py,sha256=8QUVrkE9j-9C1AyrCb1nQEh8daCe0u1HuXl-Te85lag,1205
- trilogy/core/processing/utility.py,sha256=PGQKZgX58kI3gG4nprY8HRGayc2D8fF5RmbvBhQj8ok,23319
+ trilogy/core/processing/utility.py,sha256=1_oNnk6lWiy-D7LKYr07kU_v7iAM4i6ITUAS4bIiCr4,23444
  trilogy/core/processing/node_generators/__init__.py,sha256=iVJ-crowPxYeut-hFjyEjfibKIDq7PfB4LEuDAUCjGY,943
- trilogy/core/processing/node_generators/basic_node.py,sha256=0Uhnf07056SBbRkt-wYLw4DZqsFR6jztGLUaE9ebPZs,5577
- trilogy/core/processing/node_generators/common.py,sha256=PdysdroW9DUADP7f5Wv_GKPUyCTROZV1g3L45fawxi8,9443
+ trilogy/core/processing/node_generators/basic_node.py,sha256=74LoVZXLinRvSzk2LmI1kwza96TnuH3ELoYRIbHB29A,5578
+ trilogy/core/processing/node_generators/common.py,sha256=xF32Kf6B08dZgKs2SOow1HomptSiSC057GCUCHFlS5s,9464
  trilogy/core/processing/node_generators/constant_node.py,sha256=LfpDq2WrBRZ3tGsLxw77LuigKfhbteWWh9L8BGdMGwk,1146
- trilogy/core/processing/node_generators/filter_node.py,sha256=ArBsQJl-4fWBJWCE28CRQ7UT7ErnFfbcseoQQZrBodY,11220
- trilogy/core/processing/node_generators/group_node.py,sha256=yqOWl5TCV4PrdJua4OJkPUIHkljaLoSW2Y8eRAmVddQ,6733
+ trilogy/core/processing/node_generators/filter_node.py,sha256=ndPznkcFu_cdCNgaRpgot8oqnzdHv4KAIfjeUIzrE2w,10816
+ trilogy/core/processing/node_generators/group_node.py,sha256=NdK1rl6Ze94XFWtgeC2dlRiL4pS3lh1ArKGPEltLtnw,8525
  trilogy/core/processing/node_generators/group_to_node.py,sha256=jKcNCDOY6fNblrdZwaRU0sbUSr9H0moQbAxrGgX6iGA,3832
- trilogy/core/processing/node_generators/multiselect_node.py,sha256=GWV5yLmKTe1yyPhN60RG1Rnrn4ktfn9lYYXi_FVU4UI,7061
- trilogy/core/processing/node_generators/node_merge_node.py,sha256=1joMV7XpQ9Gpe-d5y7JUMBHIqakV5wFJi3Mtvs4UcL4,23415
+ trilogy/core/processing/node_generators/multiselect_node.py,sha256=a505AEixjsjp5jI8Ng3H5KF_AaehkS6HfRfTef64l_o,7063
+ trilogy/core/processing/node_generators/node_merge_node.py,sha256=hNcZxnDLTZyYJWfojg769zH9HB9PfZfESmpN1lcHWXg,23172
  trilogy/core/processing/node_generators/recursive_node.py,sha256=l5zdh0dURKwmAy8kK4OpMtZfyUEQRk6N-PwSWIyBpSM,2468
- trilogy/core/processing/node_generators/rowset_node.py,sha256=T11Rqj-tsfubjFvBO0rzIVxtv9tOwwKXjGyut0r9xIY,5919
+ trilogy/core/processing/node_generators/rowset_node.py,sha256=MuVNIexXhqGONho_mewqMOwaYXNUnjjvyPvk_RDGNYE,5943
  trilogy/core/processing/node_generators/select_merge_node.py,sha256=KQvGoNT5ZBWQ_caEomRTtG1PKZC7OPT4PKfY0QmwMGE,22270
  trilogy/core/processing/node_generators/select_node.py,sha256=Ta1G39V94gjX_AgyZDz9OqnwLz4BjY3D6Drx9YpziMQ,3555
  trilogy/core/processing/node_generators/synonym_node.py,sha256=AnAsa_Wj50NJ_IK0HSgab_7klYmKVrv0WI1uUe-GvEY,3766
@@ -61,10 +61,10 @@ trilogy/core/processing/node_generators/window_node.py,sha256=A90linr4pkZtTNfn9k
  trilogy/core/processing/node_generators/select_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  trilogy/core/processing/node_generators/select_helpers/datasource_injection.py,sha256=m2YQ4OmG0N2O61a7NEq1ZzbTa7JsCC00lxB2ymjcYRI,8224
  trilogy/core/processing/nodes/__init__.py,sha256=zTge1EzwzEydlcMliIFO_TT7h7lS8l37lyZuQDir1h0,5487
- trilogy/core/processing/nodes/base_node.py,sha256=TQZLEz_xfXpdVyFa9R5BwvikH1OqzJUioOPw8vTETWc,18144
+ trilogy/core/processing/nodes/base_node.py,sha256=6LPQ5zP_dZJ6-k_dmX9ZSLsHaQMHgqiR5DEylpHYGZA,18478
  trilogy/core/processing/nodes/filter_node.py,sha256=5VtRfKbCORx0dV-vQfgy3gOEkmmscL9f31ExvlODwvY,2461
- trilogy/core/processing/nodes/group_node.py,sha256=njz-5T7OJ3-kaBC7EhdtPra3G77HnI7apjUwMGhUeXo,10569
- trilogy/core/processing/nodes/merge_node.py,sha256=daJywBxh44Gqk-7eTiXbYtY7xo6O6fNvqX-DagTOTmE,16231
+ trilogy/core/processing/nodes/group_node.py,sha256=sKsRP_BWEKg6z63T1X5ZlkJF2IMif0IEbVWTk-cdOH8,7100
+ trilogy/core/processing/nodes/merge_node.py,sha256=uc0tlz30Yt9SnCwLhMcWuPVbXLzm3dzy0XqbyirqqTo,16521
  trilogy/core/processing/nodes/recursive_node.py,sha256=k0rizxR8KE64ievfHx_GPfQmU8QAP118Laeyq5BLUOk,1526
  trilogy/core/processing/nodes/select_node_v2.py,sha256=IWyKyNgFlV8A2S1FUTPdIaogg6PzaHh-HmQo6v24sbg,8862
  trilogy/core/processing/nodes/union_node.py,sha256=hLAXXVWqEgMWi7dlgSHfCF59fon64av14-uPgJzoKzM,1870
@@ -82,7 +82,7 @@ trilogy/core/validation/datasource.py,sha256=nJeEFyb6iMBwlEVdYVy1vLzAbdRZwOsUjGx
  trilogy/core/validation/environment.py,sha256=ymvhQyt7jLK641JAAIQkqjQaAmr9C5022ILzYvDgPP0,2835
  trilogy/core/validation/fix.py,sha256=Z818UFNLxndMTLiyhB3doLxIfnOZ-16QGvVFWuD7UsA,3750
  trilogy/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- trilogy/dialect/base.py,sha256=mgARj-aldkFAqdwps_25da03NLIAxU6Xg9Jq_VcOtp0,50181
+ trilogy/dialect/base.py,sha256=hFX0_3N-m3ZRTCyv1S650a8OPlx9qjp5Zh8wzTBx6E8,50338
  trilogy/dialect/bigquery.py,sha256=XS3hpybeowgfrOrkycAigAF3NX2YUzTzfgE6f__2fT4,4316
  trilogy/dialect/common.py,sha256=cUI7JMmpG_A5KcaxRI-GoyqwLMD6jTf0JJhgcOdwQK4,5833
  trilogy/dialect/config.py,sha256=olnyeVU5W5T6b9-dMeNAnvxuPlyc2uefb7FRME094Ec,3834
@@ -110,16 +110,17 @@ trilogy/parsing/trilogy.lark,sha256=6eBDD6d4D9N1Nnn4CtmaoB-NpOpjHrEn5oi0JykAlbE,
  trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  trilogy/scripts/trilogy.py,sha256=1L0XrH4mVHRt1C9T1HnaDv2_kYEfbWTb5_-cBBke79w,3774
  trilogy/std/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ trilogy/std/color.preql,sha256=sS9AXLDkECuDbNGnBMi2KnUuJukyVZVThKI9mP-ZOZI,50
  trilogy/std/date.preql,sha256=HWZm4t4HWyxr5geWRsY05RnHBVDMci8z8YA2cu0-OOw,188
- trilogy/std/display.preql,sha256=S20HW8qbShBc4OZPcHYiRlLdcaBp9dwruozWBoXKscs,293
+ trilogy/std/display.preql,sha256=ZJ08crsZnC3kaWwNUrMB1ZH5j6DUUbz8RaUgihA8sm4,299
  trilogy/std/geography.preql,sha256=1A9Sq5PPMBnEPPf7f-rPVYxJfsnWpQ8oV_k4Fm3H2dU,675
  trilogy/std/metric.preql,sha256=DRECGhkMyqfit5Fl4Ut9zbWrJuSMI1iO9HikuyoBpE0,421
  trilogy/std/money.preql,sha256=XWwvAV3WxBsHX9zfptoYRnBigcfYwrYtBHXTME0xJuQ,2082
  trilogy/std/net.preql,sha256=WZCuvH87_rZntZiuGJMmBDMVKkdhTtxeHOkrXNwJ1EE,416
  trilogy/std/ranking.preql,sha256=LDoZrYyz4g3xsII9XwXfmstZD-_92i1Eox1UqkBIfi8,83
  trilogy/std/report.preql,sha256=LbV-XlHdfw0jgnQ8pV7acG95xrd1-p65fVpiIc-S7W4,202
- pytrilogy-0.0.3.103.dist-info/METADATA,sha256=RnMfz8EH2sCtqHEDAraYhAb_V7oPbovtuI3PsL2F3Ms,11839
- pytrilogy-0.0.3.103.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- pytrilogy-0.0.3.103.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
- pytrilogy-0.0.3.103.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
- pytrilogy-0.0.3.103.dist-info/RECORD,,
+ pytrilogy-0.0.3.104.dist-info/METADATA,sha256=IJmkrwnxe7gz3s89ZYVrDe6SkRY2cf6xNpmj5GTXkSE,11839
+ pytrilogy-0.0.3.104.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ pytrilogy-0.0.3.104.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
+ pytrilogy-0.0.3.104.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
+ pytrilogy-0.0.3.104.dist-info/RECORD,,
trilogy/__init__.py CHANGED
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
  from trilogy.executor import Executor
  from trilogy.parser import parse

- __version__ = "0.0.3.103"
+ __version__ = "0.0.3.104"

  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/constants.py CHANGED
@@ -37,7 +37,7 @@ class Comments:

  show: bool = False
  basic: bool = True
- joins: bool = True
+ joins: bool = False
  nullable: bool = True
  partial: bool = True
  source_map: bool = False
trilogy/core/models/execute.py CHANGED
@@ -711,8 +711,6 @@ class QueryDatasource(BaseModel):
  f" {[c.address for c in self.output_concepts]} concepts and"
  f" {other.name} with {[c.address for c in other.output_concepts]} concepts"
  )
- logger.info(self.source_map)
- logger.info(other.source_map)

  merged_datasources: dict[str, Union[BuildDatasource, "QueryDatasource"]] = {}

@@ -816,10 +814,7 @@
  use_raw_name,
  force_alias=force_alias,
  )
- except ValueError as e:
- from trilogy.constants import logger
-
- logger.debug(e)
+ except ValueError:
  continue
  existing = [c.with_grain(self.grain) for c in self.output_concepts]
  if concept in existing:
trilogy/core/optimizations/predicate_pushdown.py CHANGED
@@ -1,5 +1,6 @@
  from trilogy.core.enums import (
  BooleanOperator,
+ SourceType,
  )
  from trilogy.core.models.build import (
  BuildComparison,
@@ -59,12 +60,19 @@ class PredicatePushdown(OptimizationRule):
  )
  return False
  materialized = {k for k, v in parent_cte.source_map.items() if v != []}
+
  if not row_conditions or not materialized:
  return False
  output_addresses = {x.address for x in parent_cte.output_columns}
  # if any of the existence conditions are created on the asset, we can't push up to it
  if existence_conditions and existence_conditions.intersection(output_addresses):
  return False
+ if existence_conditions:
+ self.log(
+ f"Not pushing up existence {candidate} to {parent_cte.name} as it is a filter node"
+ )
+ if parent_cte.source.source_type == SourceType.FILTER:
+ return False
  # if it's a root datasource, we can filter on _any_ of the output concepts
  if parent_cte.is_root_datasource:
  extra_check = {
@@ -81,7 +89,7 @@
  children = inverse_map.get(parent_cte.name, [])
  if all([is_child_of(candidate, child.condition) for child in children]):
  self.log(
- f"All concepts are found on {parent_cte.name} with existing {parent_cte.condition} and all it's {len(children)} children include same filter; pushing up {candidate}"
+ f"All concepts [{row_conditions}] and existence conditions [{existence_conditions}] not block pushup of [{output_addresses}]found on {parent_cte.name} with existing {parent_cte.condition} and all it's {len(children)} children include same filter; pushing up {candidate}"
  )
  if parent_cte.condition and not is_scalar_condition(
  parent_cte.condition
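The new SourceType.FILTER guard exists because pushing a row predicate above a CTE that itself filters rows can change results when the filter's condition depends on its full input. A minimal plain-Python sketch of the hazard, using hypothetical data rather than trilogy's classes:

# Hypothetical data; the "filter node" keeps customers with above-average orders.
rows = [
    {"customer": "a", "orders": 5},
    {"customer": "b", "orders": 50},
    {"customer": "c", "orders": 10},
]

avg_orders = sum(r["orders"] for r in rows) / len(rows)  # ~21.7
filtered = [r for r in rows if r["orders"] > avg_orders]  # keeps only "b"

# Applying a later predicate after the filter node is safe:
safe = [r for r in filtered if r["customer"] != "b"]  # -> []

# Pushing the same predicate above the filter changes what the filter sees:
pushed = [r for r in rows if r["customer"] != "b"]
avg_pushed = sum(r["orders"] for r in pushed) / len(pushed)  # 7.5
unsafe = [r for r in pushed if r["orders"] > avg_pushed]  # -> [{"customer": "c", ...}], a different result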
trilogy/core/processing/concept_strategies_v3.py CHANGED
@@ -19,7 +19,7 @@ from trilogy.core.processing.discovery_utility import (
  LOGGER_PREFIX,
  depth_to_prefix,
  get_priority_concept,
- group_if_required,
+ group_if_required_v2,
  )
  from trilogy.core.processing.discovery_validation import (
  ValidationResult,
@@ -66,7 +66,19 @@ def generate_candidates_restrictive(

  # if it's single row, joins are irrelevant. Fetch without keys.
  if priority_concept.granularity == Granularity.SINGLE_ROW:
- return [], conditions
+ logger.info("Have single row concept, including only other single row optional")
+ optional = (
+ [
+ x
+ for x in candidates
+ if x.granularity == Granularity.SINGLE_ROW
+ and x.address not in priority_concept.pseudonyms
+ and priority_concept.address not in x.pseudonyms
+ ]
+ if priority_concept.derivation == Derivation.AGGREGATE
+ else []
+ )
+ return optional, conditions

  if conditions and priority_concept.derivation in ROOT_DERIVATIONS:
  logger.info(
@@ -374,15 +386,21 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
  logger.info(
  f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values by setting outputs to {[x.address for x in output.output_concepts if x.address in non_virtual_output]}"
  )
- output.set_output_concepts(
- [
- x
- for x in output.output_concepts
- if x.address in non_virtual_output
- or any(c in non_virtual_output for c in x.pseudonyms)
- ],
- rebuild=False,
- )
+ # output.set_output_concepts(
+ # [
+ # x
+ # for x in output.output_concepts
+ # if x.address not in non_virtual_difference_values
+ # or any(c in non_virtual_output for c in x.pseudonyms)
+ # ],
+ # rebuild=True,
+ # change_visibility=False
+ # )
+ # output.set_output_concepts(context.original_mandatory)
+
+ # if isinstance(output, MergeNode):
+ # output.force_group = True
+ # output.rebuild_cache()

  logger.info(
  f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Source stack has single node, returning that {type(output)}"
@@ -416,14 +434,17 @@
  logger.info(
  f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node output {[x.address for x in output.usable_outputs]} partial {[c.address for c in output.partial_concepts or []]} with {context.conditions}"
  )
+ from trilogy.core.processing.discovery_utility import group_if_required_v2
+
  if condition_required and context.conditions and non_virtual_different:
  logger.info(
  f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
  )
- return group_if_required(
+ return group_if_required_v2(
  output, context.original_mandatory, context.environment
  )
- return output
+
+ return group_if_required_v2(output, context.original_mandatory, context.environment)


  def _search_concepts(
@@ -588,4 +609,4 @@
  logger.info(
  f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
  )
- return group_if_required(root, output_concepts, environment)
+ return group_if_required_v2(root, output_concepts, environment)
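With this change every completed discovery loop now flows through group_if_required_v2. The core decision it delegates to is a grain-subset check, sketched here with plain sets standing in for BuildGrain (hypothetical concept addresses):

# Grains modeled as sets of concept addresses (a simplification of BuildGrain).
produced_grain = {"customer.id", "order.id"}  # grain after condition injection
requested_grain = {"customer.id"}

# Mirrors comp_grain.issubset(target_grain) in check_if_group_required:
# no group is needed when the upstream grain is already within the target.
group_needed = not produced_grain.issubset(requested_grain)
assert group_needed  # extra order.id detail must be re-aggregated away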
trilogy/core/processing/discovery_node_factory.py CHANGED
@@ -177,7 +177,12 @@ def _generate_union_node(ctx: NodeGenerationContext) -> StrategyNode | None:
  def _generate_aggregate_node(ctx: NodeGenerationContext) -> StrategyNode | None:
  # Filter out constants to avoid multiplication issues
  agg_optional = [
- x for x in ctx.local_optional if x.granularity != Granularity.SINGLE_ROW
+ x
+ for x in ctx.local_optional
+ if not (
+ x.granularity == Granularity.SINGLE_ROW
+ and x.derivation != Derivation.AGGREGATE
+ )
  ]

  logger.info(
trilogy/core/processing/discovery_utility.py CHANGED
@@ -1,13 +1,18 @@
  from typing import List

  from trilogy.constants import logger
- from trilogy.core.enums import Derivation
+ from trilogy.core.enums import Derivation, Purpose
  from trilogy.core.models.build import (
  BuildConcept,
+ BuildDatasource,
+ BuildFilterItem,
+ BuildGrain,
  BuildRowsetItem,
  )
  from trilogy.core.models.build_environment import BuildEnvironment
+ from trilogy.core.models.execute import QueryDatasource, UnnestJoin
  from trilogy.core.processing.nodes import GroupNode, MergeNode, StrategyNode
+ from trilogy.core.processing.utility import GroupRequiredResponse


  def depth_to_prefix(depth: int) -> str:
@@ -17,31 +22,175 @@ def depth_to_prefix(depth: int) -> str:
  LOGGER_PREFIX = "[DISCOVERY LOOP]"


- def group_if_required(
+ def calculate_effective_parent_grain(
+ node: QueryDatasource | BuildDatasource,
+ ) -> BuildGrain:
+ # calculate the effective grain of the parent node
+ # this is the union of all parent grains
+ if isinstance(node, MergeNode):
+ grain = BuildGrain()
+ qds = node.resolve()
+ if not qds.joins:
+ return qds.datasources[0].grain
+ for join in qds.joins:
+ if isinstance(join, UnnestJoin):
+ continue
+ pairs = join.concept_pairs or []
+ for key in pairs:
+ left = key.existing_datasource
+ grain += left.grain
+ keys = [key.right for key in pairs]
+ join_grain = BuildGrain.from_concepts(keys)
+ if join_grain == join.right_datasource.grain:
+ logger.info(f"irrelevant right join {join}, does not change grain")
+ else:
+ logger.info(
+ f"join changes grain, adding {join.right_datasource.grain} to {grain}"
+ )
+ grain += join.right_datasource.grain
+ return grain
+ else:
+ return node.grain or BuildGrain()
+
+
+ def check_if_group_required(
+ downstream_concepts: List[BuildConcept],
+ parents: list[QueryDatasource | BuildDatasource],
+ environment: BuildEnvironment,
+ depth: int = 0,
+ ) -> GroupRequiredResponse:
+ padding = "\t" * depth
+ target_grain = BuildGrain.from_concepts(
+ downstream_concepts,
+ environment=environment,
+ )
+
+ comp_grain = BuildGrain()
+ for source in parents:
+ # comp_grain += source.grain
+ comp_grain += calculate_effective_parent_grain(source)
+
+ # dynamically select if we need to group
+ # we must avoid grouping if we are already at grain
+ if comp_grain.issubset(target_grain):
+
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, target: {target_grain}, grain is subset of target, no group node required"
+ )
+ return GroupRequiredResponse(target_grain, comp_grain, False)
+ # find out what extra is in the comp grain vs target grain
+ difference = [
+ environment.concepts[c] for c in (comp_grain - target_grain).components
+ ]
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Group requirement check: upstream grain: {comp_grain}, desired grain: {target_grain} from , difference {[x.address for x in difference]}"
+ )
+ for x in difference:
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Difference concept {x.address} purpose {x.purpose} keys {x.keys}"
+ )
+
+ # if the difference is all unique properties whose keys are in the source grain
+ # we can also suppress the group
+ if all(
+ [
+ x.keys
+ and all(
+ environment.concepts[z].address in comp_grain.components for z in x.keys
+ )
+ for x in difference
+ ]
+ ):
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
+ )
+ return GroupRequiredResponse(target_grain, comp_grain, False)
+ if all([x.purpose == Purpose.KEY for x in difference]):
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} checking if downstream is unique properties of key"
+ )
+ replaced_grain_raw: list[set[str]] = [
+ (
+ x.keys or set()
+ if x.purpose == Purpose.UNIQUE_PROPERTY
+ else set([x.address])
+ )
+ for x in downstream_concepts
+ if x.address in target_grain.components
+ ]
+ # flatten the list of lists
+ replaced_grain = [item for sublist in replaced_grain_raw for item in sublist]
+ # if the replaced grain is a subset of the comp grain, we can skip the group
+ unique_grain_comp = BuildGrain.from_concepts(
+ replaced_grain, environment=environment
+ )
+ if comp_grain.issubset(unique_grain_comp):
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
+ )
+ return GroupRequiredResponse(target_grain, comp_grain, False)
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Checking for grain equivalence for filters and rowsets"
+ )
+ ngrain = []
+ for con in target_grain.components:
+ full = environment.concepts[con]
+ if full.derivation == Derivation.ROWSET:
+ ngrain.append(full.address.split(".", 1)[1])
+ elif full.derivation == Derivation.FILTER:
+ assert isinstance(full.lineage, BuildFilterItem)
+ if isinstance(full.lineage.content, BuildConcept):
+ ngrain.append(full.lineage.content.address)
+ else:
+ ngrain.append(full.address)
+ target_grain2 = BuildGrain.from_concepts(
+ ngrain,
+ environment=environment,
+ )
+ if comp_grain.issubset(target_grain2):
+ logger.info(
+ f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain2}, pre rowset grain is subset of target, no group node required"
+ )
+ return GroupRequiredResponse(target_grain2, comp_grain, False)
+
+ logger.info(f"{padding}{LOGGER_PREFIX} Group requirement check: group required")
+ return GroupRequiredResponse(target_grain, comp_grain, True)
+
+
+ def group_if_required_v2(
  root: StrategyNode, final: List[BuildConcept], environment: BuildEnvironment
  ):
- if isinstance(root, MergeNode) and root.force_group is True:
- return root
- elif isinstance(root, GroupNode):
- return root
- elif GroupNode.check_if_required(
- downstream_concepts=final,
- parents=[root.resolve()],
- environment=environment,
- ).required:
+ required = check_if_group_required(
+ downstream_concepts=final, parents=[root.resolve()], environment=environment
+ )
+ targets = [
+ x
+ for x in root.output_concepts
+ if x.address in final or any(c in final for c in x.pseudonyms)
+ ]
+ if required.required:
  if isinstance(root, MergeNode):
  root.force_group = True
- root.set_output_concepts(final, rebuild=False)
+ root.set_output_concepts(targets, rebuild=False, change_visibility=False)
  root.rebuild_cache()
  return root
+ elif isinstance(root, GroupNode):
+ # root.set_output_concepts(final, rebuild=False)
+ # root.rebuild_cache()
+ return root
  return GroupNode(
- output_concepts=final,
- input_concepts=final,
+ output_concepts=targets,
+ input_concepts=targets,
  environment=environment,
  parents=[root],
  partial_concepts=root.partial_concepts,
  preexisting_conditions=root.preexisting_conditions,
  )
+ elif isinstance(root, GroupNode):
+
+ return root
+ else:
+ root.set_output_concepts(targets, rebuild=False, change_visibility=False)
  return root
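The join rule inside calculate_effective_parent_grain, sketched with plain sets standing in for grains (hypothetical tables, not trilogy's API): a join made on the right source's full grain cannot fan out rows, so it leaves the effective grain unchanged, while any other join contributes the right side's grain.

def effective_grain(left_grain: set[str], joins: list[dict]) -> set[str]:
    # Simplified stand-in for calculate_effective_parent_grain above.
    grain = set(left_grain)
    for join in joins:
        if set(join["join_keys"]) == set(join["right_grain"]):
            # at most one right-side row per key: grain unchanged
            continue
        grain |= set(join["right_grain"])
    return grain

# Joining orders to customers on customer.id (customers' full grain)
# keeps the result at order grain, so no regrouping is triggered:
assert effective_grain(
    {"order.id"},
    [{"join_keys": ["customer.id"], "right_grain": ["customer.id"]}],
) == {"order.id"}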
trilogy/core/processing/node_generators/basic_node.py CHANGED
@@ -143,4 +143,5 @@ def gen_basic_node(
  logger.info(
  f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: input: {[x.address for x in parent_node.input_concepts]} output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
  )
+
  return parent_node
trilogy/core/processing/node_generators/common.py CHANGED
@@ -222,6 +222,7 @@ def gen_enrichment_node(
  parents=[enrich_node, base_node],
  force_group=False,
  preexisting_conditions=conditions.conditional if conditions else None,
+ depth=depth,
  )

trilogy/core/processing/node_generators/filter_node.py CHANGED
@@ -246,13 +246,6 @@ def gen_filter_node(
  filter_node = parent
  else:
  core_parent_nodes.append(row_parent)
- filters = [concept] + same_filter_optional
- parents_for_grain = [
- x.lineage.content
- for x in filters
- if isinstance(x.lineage, BuildFilterItem)
- and isinstance(x.lineage.content, BuildConcept)
- ]
  filter_node = FilterNode(
  input_concepts=unique(
  parent_row_concepts + flattened_existence,
@@ -261,9 +254,6 @@
  output_concepts=[concept] + same_filter_optional + parent_row_concepts,
  environment=environment,
  parents=core_parent_nodes,
- grain=BuildGrain.from_concepts(
- parents_for_grain + parent_row_concepts, environment=environment
- ),
  preexisting_conditions=conditions.conditional if conditions else None,
  )

trilogy/core/processing/node_generators/group_node.py CHANGED
@@ -108,6 +108,42 @@ def gen_group_node(
  logger.info(
  f"{padding(depth)}{LOGGER_PREFIX} cannot include optional agg {possible_agg.address}; it has mismatched parent grain {comp_grain } vs local parent {build_grain_parents}"
  )
+ elif concept.grain.abstract:
+ for possible_agg in local_optional:
+ if not isinstance(
+ possible_agg.lineage,
+ (BuildAggregateWrapper, BuildFunction),
+ ):
+
+ continue
+ logger.info(
+ f"{padding(depth)}{LOGGER_PREFIX} considering optional agg {possible_agg.address} for {concept.address}"
+ )
+ agg_parents = resolve_function_parent_concepts(
+ possible_agg,
+ environment=environment,
+ )
+ comp_grain = get_aggregate_grain(possible_agg, environment)
+ if not possible_agg.grain.abstract:
+ continue
+ if set([x.address for x in agg_parents]).issubset(
+ set([x.address for x in parent_concepts])
+ ):
+ output_concepts.append(possible_agg)
+ logger.info(
+ f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+ )
+ elif comp_grain == get_aggregate_grain(concept, environment):
+ extra = [x for x in agg_parents if x.address not in parent_concepts]
+ parent_concepts += extra
+ output_concepts.append(possible_agg)
+ logger.info(
+ f"{padding(depth)}{LOGGER_PREFIX} found equivalent group by optional concept {possible_agg.address} for {concept.address}"
+ )
+ else:
+ logger.info(
+ f"{padding(depth)}{LOGGER_PREFIX} cannot include optional agg {possible_agg.address}; it has mismatched parent grain {comp_grain } vs local parent {get_aggregate_grain(concept, environment)}"
+ )
  if parent_concepts:
  target_grain = BuildGrain.from_concepts(parent_concepts)
  logger.info(
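The new elif concept.grain.abstract branch folds other global (abstract-grain) aggregates into the same group node when their parents line up. In plain-Python terms (hypothetical rows, not trilogy's API), the payoff is computing several whole-table aggregates in one pass rather than one scan each:

rows = [{"amount": 10.0}, {"amount": 5.0}, {"amount": 7.5}]

# Both aggregates have abstract (empty) grain and draw on the same parent
# concepts, so one aggregation node can emit both outputs together:
combined = {
    "total_amount": sum(r["amount"] for r in rows),  # 22.5
    "order_count": len(rows),                        # 3
}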
trilogy/core/processing/node_generators/multiselect_node.py CHANGED
@@ -156,7 +156,7 @@ def gen_multiselect_node(
  possible_joins = concept_to_relevant_joins(additional_relevant)
  if not local_optional:
  logger.info(
- f"{padding(depth)}{LOGGER_PREFIX} no enriched required for rowset node; exiting early"
+ f"{padding(depth)}{LOGGER_PREFIX} no enrichment required for rowset node; exiting early"
  )
  return node
  if not possible_joins:
trilogy/core/processing/node_generators/node_merge_node.py CHANGED
@@ -20,9 +20,6 @@ from trilogy.core.models.build import (
  BuildWhereClause,
  )
  from trilogy.core.models.build_environment import BuildEnvironment
- from trilogy.core.processing.discovery_utility import (
- group_if_required,
- )
  from trilogy.core.processing.nodes import History, MergeNode, StrategyNode
  from trilogy.core.processing.utility import padding
  from trilogy.utility import unique
@@ -510,7 +507,7 @@ def subgraphs_to_merge_node(
  search_conditions: BuildWhereClause | None = None,
  enable_early_exit: bool = True,
  ):
- target_grain = BuildGrain.from_concepts(output_concepts, environment=environment)
+
  parents: List[StrategyNode] = []
  logger.info(
  f"{padding(depth)}{LOGGER_PREFIX} fetching subgraphs {[[c.address for c in subgraph] for subgraph in concept_subgraphs]}"
@@ -555,7 +552,7 @@
  f"{padding(depth)}{LOGGER_PREFIX} only one parent node, exiting early w/ {[c.address for c in parents[0].output_concepts]}"
  )
  parent = parents[0]
- return group_if_required(parent, output_concepts, environment)
+ return parent

  rval = MergeNode(
  input_concepts=unique(input_c, "address"),
@@ -563,7 +560,6 @@
  environment=environment,
  parents=parents,
  depth=depth,
- grain=target_grain,
  # hidden_concepts=[]
  # conditions=conditions,
  # conditions=search_conditions.conditional,
trilogy/core/processing/node_generators/rowset_node.py CHANGED
@@ -92,7 +92,7 @@ def gen_rowset_node(

  node.rebuild_cache()
  logger.info(
- f"{padding(depth)}{LOGGER_PREFIX} final output is {[x.address for x in node.output_concepts]}"
+ f"{padding(depth)}{LOGGER_PREFIX} final output is {[x.address for x in node.output_concepts]} with grain {node.grain}"
  )
  if not local_optional or all(
  (
trilogy/core/processing/nodes/base_node.py CHANGED
@@ -285,12 +285,23 @@ class StrategyNode:
  self.rebuild_cache()
  return self

- def set_output_concepts(self, concepts: List[BuildConcept], rebuild: bool = True):
+ def set_visible_concepts(self, concepts: List[BuildConcept]):
+ for x in self.output_concepts:
+ if x.address not in [c.address for c in concepts]:
+ self.hidden_concepts.add(x.address)
+ return self
+
+ def set_output_concepts(
+ self,
+ concepts: List[BuildConcept],
+ rebuild: bool = True,
+ change_visibility: bool = True,
+ ):
  # exit if no changes
  if self.output_concepts == concepts:
  return self
  self.output_concepts = concepts
- if self.hidden_concepts:
+ if self.hidden_concepts and change_visibility:
  self.hidden_concepts = set(
  x for x in self.hidden_concepts if x not in concepts
  )
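A set-based sketch of the visibility changes above, with plain strings standing in for StrategyNode state (hypothetical addresses): set_visible_concepts hides every current output not explicitly listed, while set_output_concepts(..., change_visibility=False) swaps outputs without disturbing the hidden set.

output_concepts = {"order.id", "customer.id", "internal.row_filter"}
hidden_concepts: set[str] = set()

def set_visible_concepts(visible: set[str]) -> None:
    # hide any current output that was not requested as visible
    hidden_concepts.update(output_concepts - visible)

set_visible_concepts({"order.id", "customer.id"})
assert hidden_concepts == {"internal.row_filter"}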
trilogy/core/processing/nodes/group_node.py CHANGED
@@ -1,14 +1,12 @@
- from dataclasses import dataclass
  from typing import List, Optional

  from trilogy.constants import logger
- from trilogy.core.enums import Purpose, SourceType
+ from trilogy.core.enums import SourceType
  from trilogy.core.models.build import (
  BuildComparison,
  BuildConcept,
  BuildConditional,
  BuildDatasource,
- BuildGrain,
  BuildOrderBy,
  BuildParenthetical,
  )
@@ -18,19 +16,16 @@ from trilogy.core.processing.nodes.base_node import (
  StrategyNode,
  resolve_concept_map,
  )
- from trilogy.core.processing.utility import find_nullable_concepts, is_scalar_condition
+ from trilogy.core.processing.utility import (
+ GroupRequiredResponse,
+ find_nullable_concepts,
+ is_scalar_condition,
+ )
  from trilogy.utility import unique

  LOGGER_PREFIX = "[CONCEPT DETAIL - GROUP NODE]"


- @dataclass
- class GroupRequiredResponse:
- target: BuildGrain
- upstream: BuildGrain
- required: bool
-
-
  class GroupNode(StrategyNode):
  source_type = SourceType.GROUP

@@ -80,92 +75,15 @@ class GroupNode(StrategyNode):
  environment: BuildEnvironment,
  depth: int = 0,
  ) -> GroupRequiredResponse:
- padding = "\t" * depth
- target_grain = BuildGrain.from_concepts(
- downstream_concepts,
- environment=environment,
- )
-
- # the concepts of the souce grain might not exist in the output environment
- # so we need to construct a new
- concept_map: dict[str, BuildConcept] = {}
- comp_grain = BuildGrain()
- for source in parents:
- comp_grain += source.grain
- for x in source.output_concepts:
- concept_map[x.address] = x
- lookups: list[BuildConcept | str] = [
- concept_map[x] if x in concept_map else x for x in comp_grain.components
- ]
-
- comp_grain = BuildGrain.from_concepts(lookups, environment=environment)
-
- # dynamically select if we need to group
- # because sometimes, we are already at required grain
- if comp_grain.issubset(target_grain):
-
- logger.info(
- f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain}, grain is subset of target, no group node required"
- )
- return GroupRequiredResponse(target_grain, comp_grain, False)
- # find out what extra is in the comp grain vs target grain
- difference = [
- environment.concepts[c] for c in (comp_grain - target_grain).components
- ]
- logger.info(
- f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain}, difference {[x.address for x in difference]}"
- )
+ from trilogy.core.processing.discovery_utility import check_if_group_required

- # if the difference is all unique properties whose keys are in the source grain
- # we can also suppress the group
- if all(
- [
- x.keys
- and all(
- environment.concepts[z].address in comp_grain.components
- for z in x.keys
- )
- for x in difference
- ]
- ):
- logger.info(
- f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
- )
- return GroupRequiredResponse(target_grain, comp_grain, False)
- if all([x.purpose == Purpose.KEY for x in difference]):
- logger.info(
- f"{padding}{LOGGER_PREFIX} checking if downstream is unique properties of key"
- )
- replaced_grain_raw: list[set[str]] = [
- (
- x.keys or set()
- if x.purpose == Purpose.UNIQUE_PROPERTY
- else set([x.address])
- )
- for x in downstream_concepts
- if x.address in target_grain.components
- ]
- # flatten the list of lists
- replaced_grain = [
- item for sublist in replaced_grain_raw for item in sublist
- ]
- # if the replaced grain is a subset of the comp grain, we can skip the group
- unique_grain_comp = BuildGrain.from_concepts(
- replaced_grain, environment=environment
- )
- if comp_grain.issubset(unique_grain_comp):
- logger.info(
- f"{padding}{LOGGER_PREFIX} Group requirement check: skipped due to unique property validation"
- )
- return GroupRequiredResponse(target_grain, comp_grain, False)
-
- logger.info(f"{padding}{LOGGER_PREFIX} Group requirement check: group required")
- return GroupRequiredResponse(target_grain, comp_grain, True)
+ return check_if_group_required(downstream_concepts, parents, environment, depth)

  def _resolve(self) -> QueryDatasource:
  parent_sources: List[QueryDatasource | BuildDatasource] = [
  p.resolve() for p in self.parents
  ]
+
  grains = self.check_if_required(
  self.output_concepts, parent_sources, self.environment, self.depth
  )
trilogy/core/processing/nodes/merge_node.py CHANGED
@@ -341,6 +341,7 @@ class MergeNode(StrategyNode):
  full_join_concepts += join.input_concepts

  if self.force_group is True:
+
  force_group = True
  elif self.whole_grain:
  force_group = False
@@ -367,6 +368,14 @@
  nullable_concepts = find_nullable_concepts(
  source_map=source_map, joins=joins, datasources=final_datasets
  )
+ if force_group:
+
+ grain = BuildGrain.from_concepts(
+ self.output_concepts, environment=self.environment
+ )
+ logger.info(
+ f"{self.logging_prefix}{LOGGER_PREFIX} forcing group by to achieve grain {grain}"
+ )
  qds = QueryDatasource(
  input_concepts=unique(self.input_concepts, "address"),
  output_concepts=unique(self.output_concepts, "address"),
trilogy/core/processing/utility.py CHANGED
@@ -27,6 +27,7 @@ from trilogy.core.models.build import (
  BuildDatasource,
  BuildFilterItem,
  BuildFunction,
+ BuildGrain,
  BuildParenthetical,
  BuildSubselectComparison,
  BuildWindowItem,
@@ -82,6 +83,13 @@ class JoinOrderOutput:
  return set(self.keys.keys())


+ @dataclass
+ class GroupRequiredResponse:
+ target: BuildGrain
+ upstream: BuildGrain
+ required: bool
+
+
  def resolve_join_order_v2(
  g: nx.Graph, partials: dict[str, list[str]], nullables: dict[str, list[str]]
  ) -> list[JoinOrderOutput]:
trilogy/dialect/base.py CHANGED
@@ -786,6 +786,8 @@ class BaseDialect:
  return str(e.value)
  elif isinstance(e, ArrayType):
  return f"{self.COMPLEX_DATATYPE_MAP[DataType.ARRAY](self.render_expr(e.value_data_type, cte=cte, cte_map=cte_map))}"
+ elif isinstance(e, list):
+ return f"{self.FUNCTION_MAP[FunctionType.ARRAY]([self.render_expr(x, cte=cte, cte_map=cte_map) for x in e])}"
  elif isinstance(e, BuildParamaterizedConceptReference):
  if self.rendering.parameters:
  if e.concept.namespace == DEFAULT_NAMESPACE:
@@ -794,6 +796,7 @@
  elif e.concept.lineage:
  return self.render_expr(e.concept.lineage, cte=cte, cte_map=cte_map)
  return f"{self.QUOTE_CHARACTER}{e.concept.address}{self.QUOTE_CHARACTER}"
+
  else:
  raise ValueError(f"Unable to render type {type(e)} {e}")
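What the new isinstance(e, list) branch enables, as a standalone sketch (hypothetical FUNCTION_MAP entry; each real dialect supplies its own array renderer): a Python list renders by recursively rendering its elements and handing them to the dialect's array constructor.

# Simplified dialect stand-in, not trilogy's BaseDialect.
FUNCTION_MAP = {"array": lambda args: f"[{', '.join(args)}]"}

def render_expr(e) -> str:
    if isinstance(e, list):
        return FUNCTION_MAP["array"]([render_expr(x) for x in e])
    if isinstance(e, (int, float)):
        return str(e)
    raise ValueError(f"Unable to render type {type(e)} {e}")

assert render_expr([1, 2, 3]) == "[1, 2, 3]"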
trilogy/std/color.preql ADDED
@@ -0,0 +1,3 @@
+
+
+ type hex string; # Hex color value, e.g. #FF5733
trilogy/std/display.preql CHANGED
@@ -3,7 +3,7 @@
  type percent float; # Percentage value

  def calc_percent(a, b, digits=-1) -> case when digits =-1 then
- case when b = 0 then 0.0::float::percent else
- (a/b)::float::percent end
+ case when b = 0 then 0.0::numeric::percent else
+ (a/b)::numeric::percent end
  else round((case when b = 0 then 0.0::float::percent else
- (a/b)::float::percent end):: float::percent, digits) end;
+ (a/b)::float::percent end):: numeric::percent, digits) end;