pytrilogy 0.0.3.65__py3-none-any.whl → 0.0.3.67__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic. Click here for more details.

Files changed (27)
  1. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/METADATA +1 -1
  2. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/RECORD +27 -27
  3. trilogy/__init__.py +1 -1
  4. trilogy/core/exceptions.py +4 -4
  5. trilogy/core/models/author.py +17 -4
  6. trilogy/core/models/build.py +42 -4
  7. trilogy/core/models/environment.py +5 -6
  8. trilogy/core/processing/concept_strategies_v3.py +15 -6
  9. trilogy/core/processing/discovery_node_factory.py +4 -1
  10. trilogy/core/processing/discovery_utility.py +1 -9
  11. trilogy/core/processing/discovery_validation.py +17 -4
  12. trilogy/core/processing/node_generators/filter_node.py +25 -5
  13. trilogy/core/processing/node_generators/group_node.py +1 -1
  14. trilogy/core/processing/node_generators/node_merge_node.py +46 -31
  15. trilogy/core/processing/node_generators/select_merge_node.py +26 -24
  16. trilogy/core/processing/node_generators/select_node.py +38 -0
  17. trilogy/core/processing/node_generators/synonym_node.py +11 -7
  18. trilogy/core/processing/nodes/base_node.py +12 -3
  19. trilogy/core/processing/nodes/group_node.py +1 -1
  20. trilogy/core/query_processor.py +4 -0
  21. trilogy/dialect/base.py +1 -1
  22. trilogy/parsing/common.py +28 -3
  23. trilogy/std/display.preql +4 -1
  24. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/WHEEL +0 -0
  25. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/entry_points.txt +0 -0
  26. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/licenses/LICENSE.md +0 -0
  27. {pytrilogy-0.0.3.65.dist-info → pytrilogy-0.0.3.67.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.65
3
+ Version: 0.0.3.67
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -1,5 +1,5 @@
1
- pytrilogy-0.0.3.65.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
2
- trilogy/__init__.py,sha256=5PSTzzhWYk76xlzO_XJek4S4HFPOp41QHzTT_y75mqk,303
1
+ pytrilogy-0.0.3.67.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
2
+ trilogy/__init__.py,sha256=kIt0gE_lZ8Zmqhi-fcE9krdwlETcz0sP0kiCjnsBQog,303
3
3
  trilogy/compiler.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  trilogy/constants.py,sha256=lv_aJWP6dn6e2aF4BAE72jbnNtceFddfqtiDSsvzno0,1692
5
5
  trilogy/engine.py,sha256=OK2RuqCIUId6yZ5hfF8J1nxGP0AJqHRZiafcowmW0xc,1728
@@ -15,55 +15,55 @@ trilogy/core/enums.py,sha256=2sgoVzHQpscIx_WUqJMH0oGxtcg41gN0l1qkRjzK2iI,7803
15
15
  trilogy/core/env_processor.py,sha256=pFsxnluKIusGKx1z7tTnfsd_xZcPy9pZDungkjkyvI0,3170
16
16
  trilogy/core/environment_helpers.py,sha256=VvPIiFemqaLLpIpLIqprfu63K7muZ1YzNg7UZIUph8w,8267
17
17
  trilogy/core/ergonomics.py,sha256=e-7gE29vPLFdg0_A1smQ7eOrUwKl5VYdxRSTddHweRA,1631
18
- trilogy/core/exceptions.py,sha256=JPYyBcit3T_pRtlHdtKSeVJkIyWUTozW2aaut25A2xI,673
18
+ trilogy/core/exceptions.py,sha256=jYEduuMehcMkmCpf-OC_taELPZm7qNfeSNzIWkDYScs,707
19
19
  trilogy/core/functions.py,sha256=poVfAwet1xdxTkC7WL38UmGRDpUVO9iSMNWSagl9_r4,29302
20
20
  trilogy/core/graph_models.py,sha256=wIT-oBchHWE46GLDkgN5K7EzhOBEo8LfaeWV5G5cYcE,3302
21
21
  trilogy/core/internal.py,sha256=iicDBlC6nM8d7e7jqzf_ZOmpUsW8yrr2AA8AqEiLx-s,1577
22
22
  trilogy/core/optimization.py,sha256=ojpn-p79lr03SSVQbbw74iPCyoYpDYBmj1dbZ3oXCjI,8860
23
- trilogy/core/query_processor.py,sha256=QiE_w5HgheT4GLZFnaLssJ4plf4voK0TeTd6N3jhR6A,20188
23
+ trilogy/core/query_processor.py,sha256=jA1lek3Kbpi_-NamDPjJuJanUdY7KM3ODB7tS_qqxH4,20311
24
24
  trilogy/core/utility.py,sha256=3VC13uSQWcZNghgt7Ot0ZTeEmNqs__cx122abVq9qhM,410
25
25
  trilogy/core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
- trilogy/core/models/author.py,sha256=8XbIsQr6cQrgo9uzee5qRoYiMdEG7yKF4FiiWImW7U0,77490
27
- trilogy/core/models/build.py,sha256=80v9rxwl41O8_7uIJoHK4tnTUfgR6u8EPrwHg4ySqO4,63323
26
+ trilogy/core/models/author.py,sha256=44VyEInWied287lJp99T14Ahq8RSQK4q3Y-AzAByMTY,77818
27
+ trilogy/core/models/build.py,sha256=eNjbFwg_mEcwPeagQpkIybIluIrRCKJLAHdpZjm01e8,64857
28
28
  trilogy/core/models/build_environment.py,sha256=s_C9xAHuD3yZ26T15pWVBvoqvlp2LdZ8yjsv2_HdXLk,5363
29
29
  trilogy/core/models/core.py,sha256=EMAuWTngoNVGCdfNrAY7_k6g528iodNQLwPRVip-8DA,10980
30
30
  trilogy/core/models/datasource.py,sha256=6RjJUd2u4nYmEwFBpJlM9LbHVYDv8iHJxqiBMZqUrwI,9422
31
- trilogy/core/models/environment.py,sha256=7bkxUob5pNgvK7Om-qvlJgsDiCh5iSPlMHI7tN_OZhU,27717
31
+ trilogy/core/models/environment.py,sha256=TBbPfsXHpJK49QKuqHwhgZD4PwHiSAYjXmTTTomRE7o,27861
32
32
  trilogy/core/models/execute.py,sha256=94CZVY_EdW675n8SihDnhaGTV0dq7BAAsl2Anf1mPmk,41815
33
33
  trilogy/core/optimizations/__init__.py,sha256=YH2-mGXZnVDnBcWVi8vTbrdw7Qs5TivG4h38rH3js_I,290
34
34
  trilogy/core/optimizations/base_optimization.py,sha256=gzDOKImoFn36k7XBD3ysEYDnbnb6vdVIztUfFQZsGnM,513
35
35
  trilogy/core/optimizations/inline_datasource.py,sha256=2sWNRpoRInnTgo9wExVT_r9RfLAQHI57reEV5cGHUcg,4329
36
36
  trilogy/core/optimizations/predicate_pushdown.py,sha256=g4AYE8Aw_iMlAh68TjNXGP754NTurrDduFECkUjoBnc,9399
37
37
  trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
38
- trilogy/core/processing/concept_strategies_v3.py,sha256=uD_Bzy2l30TJ5-6v0tn9dO-vI6zFRgtHsIHUvge3Sps,22536
38
+ trilogy/core/processing/concept_strategies_v3.py,sha256=zy5VZa9LITOws6aIILfv_bSR2-jR1Ndldy-nmwMyQ5w,23144
39
39
  trilogy/core/processing/discovery_loop.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
40
- trilogy/core/processing/discovery_node_factory.py,sha256=X3-ywUvGbbcwwWtMqKXsPc6xyh3o41eiLRuByXGCvY4,14915
41
- trilogy/core/processing/discovery_utility.py,sha256=hF3aUbRHHZFeFT5aBjE6TuSeU60I90gzmj512QXG_t8,4856
42
- trilogy/core/processing/discovery_validation.py,sha256=Ek9jviFgimLMUMYLXBChUQmOD94ihhwQ3NDVe6RTdWg,4930
40
+ trilogy/core/processing/discovery_node_factory.py,sha256=73fViHQfKsrFbuk8zfx8YIiaT66pF5AFvw4suV7bXbM,14985
41
+ trilogy/core/processing/discovery_utility.py,sha256=3xdd1ypKappSDm0SJs7WtW5YegL80SlYhDQlkNePp4E,4549
42
+ trilogy/core/processing/discovery_validation.py,sha256=fGWJmKpgEd1f4RkK-fYOBUT1cwsJnahwXFAdRlou7MI,5365
43
43
  trilogy/core/processing/graph_utils.py,sha256=8QUVrkE9j-9C1AyrCb1nQEh8daCe0u1HuXl-Te85lag,1205
44
44
  trilogy/core/processing/utility.py,sha256=mrfR9pgek-xjxoDQSlvPqOW9dpmREjgzqn4AGoqpGeM,22774
45
45
  trilogy/core/processing/node_generators/__init__.py,sha256=w8TQQgNhyAra6JQHdg1_Ags4BGyxjXYruu6UeC5yOkI,873
46
46
  trilogy/core/processing/node_generators/basic_node.py,sha256=luN8LftafZepoFgDRv4gmvEGFlOI2j0icJ5fz4UT7uo,5165
47
47
  trilogy/core/processing/node_generators/common.py,sha256=PdysdroW9DUADP7f5Wv_GKPUyCTROZV1g3L45fawxi8,9443
48
- trilogy/core/processing/node_generators/filter_node.py,sha256=0hdfiS2I-Jvr6P-il3jnAJK-g-DMG7_cFbZGCnLnJAo,10032
49
- trilogy/core/processing/node_generators/group_node.py,sha256=nIfiMrJQEksUfqAeeA3X5PS1343y4lmPTipYuCa-rvs,6141
48
+ trilogy/core/processing/node_generators/filter_node.py,sha256=oRRq2-T3ufgn4D23uQsc58f20eFk-djs4QI3WKA75K8,10908
49
+ trilogy/core/processing/node_generators/group_node.py,sha256=S7mzEAVmjc7rnHrJ9PAp4Ld8njcBSrvAu3K1cxsLQdY,6140
50
50
  trilogy/core/processing/node_generators/group_to_node.py,sha256=jKcNCDOY6fNblrdZwaRU0sbUSr9H0moQbAxrGgX6iGA,3832
51
51
  trilogy/core/processing/node_generators/multiselect_node.py,sha256=GWV5yLmKTe1yyPhN60RG1Rnrn4ktfn9lYYXi_FVU4UI,7061
52
- trilogy/core/processing/node_generators/node_merge_node.py,sha256=-sVhRHB3NqNIOk_e7LLVhW17Rjcf82hxaHjEvpF5Q-w,16858
52
+ trilogy/core/processing/node_generators/node_merge_node.py,sha256=dSqfqWp2SolhDB16nkPaaTXgNQo4QquEufPdf7q0Tb4,17398
53
53
  trilogy/core/processing/node_generators/recursive_node.py,sha256=l5zdh0dURKwmAy8kK4OpMtZfyUEQRk6N-PwSWIyBpSM,2468
54
54
  trilogy/core/processing/node_generators/rowset_node.py,sha256=2BiSsegbRF9csJ_Xl8P_CxIm4dAAb7dF29u6v_Odr-A,6709
55
- trilogy/core/processing/node_generators/select_merge_node.py,sha256=3GDGi1tNIfuKO_FMrNCfp-G1c3lxdRuuufcmomYLt4s,21446
56
- trilogy/core/processing/node_generators/select_node.py,sha256=3dvw0d53eUtCRCUPN6J48I3qBEX1Wha7saQ_ndPu6_I,1777
57
- trilogy/core/processing/node_generators/synonym_node.py,sha256=CN2swdGPEP_Irx4GykHp4gyLCK0dWd2vX7PYJUGxw7w,3548
55
+ trilogy/core/processing/node_generators/select_merge_node.py,sha256=fisCkMyzNDEdDIi9BMuekpIJT5lBF4h0z_pEwR14x9s,21438
56
+ trilogy/core/processing/node_generators/select_node.py,sha256=Ta1G39V94gjX_AgyZDz9OqnwLz4BjY3D6Drx9YpziMQ,3555
57
+ trilogy/core/processing/node_generators/synonym_node.py,sha256=F9DWaKEmJDYnwAmQduTuQP2LCCHqAMDA3oDERRDN2pU,3773
58
58
  trilogy/core/processing/node_generators/union_node.py,sha256=VNo6Oey4p8etU9xrOh2oTT2lIOTvY6PULUPRvVa2uxU,2877
59
59
  trilogy/core/processing/node_generators/unnest_node.py,sha256=ueOQtoTf2iJHO09RzWHDFQ5iKZq2fVhGf2KAF2U2kU8,2677
60
60
  trilogy/core/processing/node_generators/window_node.py,sha256=GP3Hvkbb0TDA6ef7W7bmvQEHVH-NRIfBT_0W4fcH3g4,6529
61
61
  trilogy/core/processing/node_generators/select_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
62
62
  trilogy/core/processing/node_generators/select_helpers/datasource_injection.py,sha256=Dw0pjdhuJX0p-18CjelLrIJoevAPKzZOTN9uSLNPOF4,6553
63
63
  trilogy/core/processing/nodes/__init__.py,sha256=zTge1EzwzEydlcMliIFO_TT7h7lS8l37lyZuQDir1h0,5487
64
- trilogy/core/processing/nodes/base_node.py,sha256=p6yljFNLQsXz277c5wTATMNqsKUbsdP_3e7--tezBMw,17691
64
+ trilogy/core/processing/nodes/base_node.py,sha256=HcltmrYO5RUkmM7qryNQkcahgY0r8lfmIqDbN6Kx9zE,18043
65
65
  trilogy/core/processing/nodes/filter_node.py,sha256=5VtRfKbCORx0dV-vQfgy3gOEkmmscL9f31ExvlODwvY,2461
66
- trilogy/core/processing/nodes/group_node.py,sha256=4EbOur1wSsOpPvP6znHih126o6A-TWbBXyvhiw5B0rs,10505
66
+ trilogy/core/processing/nodes/group_node.py,sha256=ZJ9LRF1sDOyel5v0MHXHORn6DYdn771nX0-KdHdt3-4,10517
67
67
  trilogy/core/processing/nodes/merge_node.py,sha256=02oWRca0ba41U6PSAB14jwnWWxoyrvxRPLwkli259SY,15865
68
68
  trilogy/core/processing/nodes/recursive_node.py,sha256=k0rizxR8KE64ievfHx_GPfQmU8QAP118Laeyq5BLUOk,1526
69
69
  trilogy/core/processing/nodes/select_node_v2.py,sha256=Xyfq8lU7rP7JTAd8VV0ATDNal64n4xIBgWQsOuMe_Ak,8824
@@ -76,7 +76,7 @@ trilogy/core/statements/build.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
76
76
  trilogy/core/statements/common.py,sha256=KxEmz2ySySyZ6CTPzn0fJl5NX2KOk1RPyuUSwWhnK1g,759
77
77
  trilogy/core/statements/execute.py,sha256=rqfuoMuXPcH7L7TmE1dSiZ_K_A1ohB8whVMfGimZBOk,1294
78
78
  trilogy/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
79
- trilogy/dialect/base.py,sha256=fM5tPJA5yi6tTHUTWpHldh4RApJUmrosDorbrRQibe4,43613
79
+ trilogy/dialect/base.py,sha256=_EkBijBaJGF_d0SBNBCf_mbmDD7N9HrnO2XbRWM8ypg,43629
80
80
  trilogy/dialect/bigquery.py,sha256=6ghCqy-k7UioIJc1EEQ7gRo_PHaO8Vm7yYbiQ-kgpzs,3629
81
81
  trilogy/dialect/common.py,sha256=hhzuMTFW9QQIP7TKLT9BlJy6lw2R03a68jKQ-7t4-2c,6070
82
82
  trilogy/dialect/config.py,sha256=olnyeVU5W5T6b9-dMeNAnvxuPlyc2uefb7FRME094Ec,3834
@@ -93,7 +93,7 @@ trilogy/hooks/graph_hook.py,sha256=5BfR7Dt0bgEsCLgwjowgCsVkboGYfVJGOz8g9mqpnos,4
93
93
  trilogy/hooks/query_debugger.py,sha256=1npRjww94sPV5RRBBlLqMJRaFkH9vhEY6o828MeoEcw,5583
94
94
  trilogy/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
95
95
  trilogy/parsing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
96
- trilogy/parsing/common.py,sha256=yuKN3fQEtftRMZlJb0ESUX4TLOVFcAE0vw2CfImYG1A,29980
96
+ trilogy/parsing/common.py,sha256=_-KWPP3NvNAHiB4B6jyGSnYKqlLs2XqXeTZiX4MCtWU,30861
97
97
  trilogy/parsing/config.py,sha256=Z-DaefdKhPDmSXLgg5V4pebhSB0h590vI0_VtHnlukI,111
98
98
  trilogy/parsing/exceptions.py,sha256=Xwwsv2C9kSNv2q-HrrKC1f60JNHShXcCMzstTSEbiCw,154
99
99
  trilogy/parsing/helpers.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
@@ -104,14 +104,14 @@ trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
104
104
  trilogy/scripts/trilogy.py,sha256=1L0XrH4mVHRt1C9T1HnaDv2_kYEfbWTb5_-cBBke79w,3774
105
105
  trilogy/std/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
106
106
  trilogy/std/date.preql,sha256=HWZm4t4HWyxr5geWRsY05RnHBVDMci8z8YA2cu0-OOw,188
107
- trilogy/std/display.preql,sha256=2BbhvqR4rcltyAbOXAUo7SZ_yGFYZgFnurglHMbjW2g,40
107
+ trilogy/std/display.preql,sha256=nm7lox87Xf6lBvXCVCS6x2HskguMKzndEBucJ5pktzk,175
108
108
  trilogy/std/geography.preql,sha256=qLnHmDU5EnvjTbfqZF-NEclSYM5_e9rZra7QjV01rZ4,582
109
109
  trilogy/std/money.preql,sha256=XWwvAV3WxBsHX9zfptoYRnBigcfYwrYtBHXTME0xJuQ,2082
110
110
  trilogy/std/net.preql,sha256=-bMV6dyofskl4Kvows-iQ4JCxjVUwsZOeWCy8JO5Ftw,135
111
111
  trilogy/std/ranking.preql,sha256=LDoZrYyz4g3xsII9XwXfmstZD-_92i1Eox1UqkBIfi8,83
112
112
  trilogy/std/report.preql,sha256=LbV-XlHdfw0jgnQ8pV7acG95xrd1-p65fVpiIc-S7W4,202
113
- pytrilogy-0.0.3.65.dist-info/METADATA,sha256=EzLG1grru3E83dGL_4EkZemWzM8QDjCe-S08QQNVAmk,9095
114
- pytrilogy-0.0.3.65.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
115
- pytrilogy-0.0.3.65.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
116
- pytrilogy-0.0.3.65.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
117
- pytrilogy-0.0.3.65.dist-info/RECORD,,
113
+ pytrilogy-0.0.3.67.dist-info/METADATA,sha256=q69gBB9Ympec6F7E9ASBDjTfuRs06HmUws7QvAxVBeA,9095
114
+ pytrilogy-0.0.3.67.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
115
+ pytrilogy-0.0.3.67.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
116
+ pytrilogy-0.0.3.67.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
117
+ pytrilogy-0.0.3.67.dist-info/RECORD,,
trilogy/__init__.py CHANGED
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
4
4
  from trilogy.executor import Executor
5
5
  from trilogy.parser import parse
6
6
 
7
- __version__ = "0.0.3.65"
7
+ __version__ = "0.0.3.67"
8
8
 
9
9
  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
@@ -8,7 +8,7 @@ class UndefinedConceptException(Exception):
8
8
  self.suggestions = suggestions
9
9
 
10
10
 
11
- class UnresolvableQueryException(Exception):
11
+ class FrozenEnvironmentException(Exception):
12
12
  pass
13
13
 
14
14
 
@@ -16,15 +16,15 @@ class InvalidSyntaxException(Exception):
16
16
  pass
17
17
 
18
18
 
19
- class NoDatasourceException(Exception):
19
+ class UnresolvableQueryException(Exception):
20
20
  pass
21
21
 
22
22
 
23
- class FrozenEnvironmentException(Exception):
23
+ class NoDatasourceException(UnresolvableQueryException):
24
24
  pass
25
25
 
26
26
 
27
- class AmbiguousRelationshipResolutionException(Exception):
27
+ class AmbiguousRelationshipResolutionException(UnresolvableQueryException):
28
28
  def __init__(self, message, parents: List[set[str]]):
29
29
  super().__init__(self, message)
30
30
  self.message = message
@@ -271,6 +271,20 @@ class Conditional(Mergeable, ConceptArgs, Namespaced, DataTyped, BaseModel):
271
271
  right: Expr
272
272
  operator: BooleanOperator
273
273
 
274
+ @field_validator("left", mode="before")
275
+ @classmethod
276
+ def left_validator(cls, v, info: ValidationInfo):
277
+ if isinstance(v, Concept):
278
+ return v.reference
279
+ return v
280
+
281
+ @field_validator("right", mode="before")
282
+ @classmethod
283
+ def right_validator(cls, v, info: ValidationInfo):
284
+ if isinstance(v, Concept):
285
+ return v.reference
286
+ return v
287
+
274
288
  def __add__(self, other) -> "Conditional":
275
289
  if other is None:
276
290
  return self
@@ -346,7 +360,6 @@ class Conditional(Mergeable, ConceptArgs, Namespaced, DataTyped, BaseModel):
346
360
 
347
361
  @property
348
362
  def concept_arguments(self) -> Sequence[ConceptRef]:
349
- """Return concepts directly referenced in where clause"""
350
363
  output = []
351
364
  output += get_concept_arguments(self.left)
352
365
  output += get_concept_arguments(self.right)
@@ -575,11 +588,11 @@ class Comparison(ConceptArgs, Mergeable, DataTyped, Namespaced, BaseModel):
575
588
  date,
576
589
  Function,
577
590
  ConceptRef,
578
- "Conditional",
591
+ Conditional,
579
592
  DataType,
580
- "Comparison",
593
+ Comparison,
581
594
  FunctionCallWrapper,
582
- "Parenthetical",
595
+ Parenthetical,
583
596
  MagicConstants,
584
597
  WindowItem,
585
598
  AggregateWrapper,
@@ -1,6 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from abc import ABC
4
+ from collections import defaultdict
4
5
  from datetime import date, datetime
5
6
  from functools import cached_property, singledispatchmethod
6
7
  from typing import (
@@ -1466,6 +1467,20 @@ BuildExpr = (
1466
1467
  BuildConcept.model_rebuild()
1467
1468
 
1468
1469
 
1470
+ def get_canonical_pseudonyms(environment: Environment) -> dict[str, set[str]]:
1471
+ roots: dict[str, set[str]] = defaultdict(set)
1472
+ for k, v in environment.concepts.items():
1473
+ roots[v.address].add(k)
1474
+ for x in v.pseudonyms:
1475
+ roots[v.address].add(x)
1476
+ for k, v in environment.alias_origin_lookup.items():
1477
+ lookup = environment.concepts[k].address
1478
+ roots[lookup].add(v.address)
1479
+ for x2 in v.pseudonyms:
1480
+ roots[lookup].add(x2)
1481
+ return roots
1482
+
1483
+
1469
1484
  class Factory:
1470
1485
 
1471
1486
  def __init__(
@@ -1479,6 +1494,7 @@ class Factory:
1479
1494
  self.local_concepts: dict[str, BuildConcept] = (
1480
1495
  {} if local_concepts is None else local_concepts
1481
1496
  )
1497
+ self.pseudonym_map = get_canonical_pseudonyms(environment)
1482
1498
 
1483
1499
  def instantiate_concept(
1484
1500
  self,
@@ -1631,7 +1647,10 @@ class Factory:
1631
1647
 
1632
1648
  @build.register
1633
1649
  def _(self, base: CaseElse) -> BuildCaseElse:
1634
- return BuildCaseElse.model_construct(expr=self.build(base.expr))
1650
+ expr: Concept | FuncArgs = base.expr
1651
+ if isinstance(expr, (AggregateWrapper, FilterItem, WindowItem)):
1652
+ expr, _ = self.instantiate_concept(expr)
1653
+ return BuildCaseElse.model_construct(expr=self.build(expr))
1635
1654
 
1636
1655
  @build.register
1637
1656
  def _(self, base: Concept) -> BuildConcept:
@@ -1653,6 +1672,17 @@ class Factory:
1653
1672
  )
1654
1673
  is_aggregate = Concept.calculate_is_aggregate(build_lineage)
1655
1674
 
1675
+ # if this is a pseudonym, we need to look up the base address
1676
+ if base.address in self.environment.alias_origin_lookup:
1677
+ lookup_address = self.environment.concepts[base.address].address
1678
+ # map only to the canonical concept, not to other merged concepts
1679
+ base_pseudonyms = {lookup_address}
1680
+ else:
1681
+ base_pseudonyms = {
1682
+ x
1683
+ for x in self.pseudonym_map.get(base.address, set())
1684
+ if x != base.address
1685
+ }
1656
1686
  rval = BuildConcept.model_construct(
1657
1687
  name=base.name,
1658
1688
  datatype=base.datatype,
@@ -1663,7 +1693,7 @@ class Factory:
1663
1693
  namespace=base.namespace,
1664
1694
  keys=base.keys,
1665
1695
  modifiers=base.modifiers,
1666
- pseudonyms=base.pseudonyms,
1696
+ pseudonyms=base_pseudonyms,
1667
1697
  ## instantiated values
1668
1698
  derivation=derivation,
1669
1699
  granularity=granularity,
@@ -1685,14 +1715,22 @@ class Factory:
1685
1715
 
1686
1716
  @build.register
1687
1717
  def _(self, base: ColumnAssignment) -> BuildColumnAssignment:
1688
- fetched = self.environment.concepts[base.concept.address]
1718
+ address = base.concept.address
1719
+ fetched = (
1720
+ self.build(
1721
+ self.environment.alias_origin_lookup[address].with_grain(self.grain)
1722
+ )
1723
+ if address in self.environment.alias_origin_lookup
1724
+ else self.build(self.environment.concepts[address].with_grain(self.grain))
1725
+ )
1726
+
1689
1727
  return BuildColumnAssignment.model_construct(
1690
1728
  alias=(
1691
1729
  self.build(base.alias)
1692
1730
  if isinstance(base.alias, Function)
1693
1731
  else base.alias
1694
1732
  ),
1695
- concept=self.build(fetched.with_grain(self.grain)),
1733
+ concept=fetched,
1696
1734
  modifiers=base.modifiers,
1697
1735
  )
1698
1736
 
@@ -362,9 +362,6 @@ class Environment(BaseModel):
362
362
  and x.concept.address != deriv_lookup
363
363
  ]
364
364
  assert len(datasource.columns) < clen
365
- for x in datasource.columns:
366
- logger.info(x)
367
-
368
365
  return None
369
366
 
370
367
  if existing and self.config.allow_duplicate_declaration:
@@ -607,15 +604,15 @@ class Environment(BaseModel):
607
604
  )
608
605
  persisted = f"{PERSISTED_CONCEPT_PREFIX}_" + new_persisted_concept.name
609
606
  # override the current concept source to reflect that it's now coming from a datasource
607
+ base_pseudonyms = new_persisted_concept.pseudonyms or set()
608
+ original_pseudonyms = {*base_pseudonyms, new_persisted_concept.address}
610
609
  if (
611
610
  new_persisted_concept.metadata.concept_source
612
611
  != ConceptSource.PERSIST_STATEMENT
613
612
  ):
614
613
  original_concept = new_persisted_concept.model_copy(
615
614
  deep=True,
616
- update={
617
- "name": persisted,
618
- },
615
+ update={"name": persisted, "pseudonyms": original_pseudonyms},
619
616
  )
620
617
  self.add_concept(
621
618
  original_concept,
@@ -629,6 +626,7 @@ class Environment(BaseModel):
629
626
  ),
630
627
  "derivation": Derivation.ROOT,
631
628
  "purpose": new_persisted_concept.purpose,
629
+ "pseudonyms": {*original_pseudonyms, original_concept.address},
632
630
  }
633
631
  # purpose is used in derivation calculation
634
632
  # which should be fixed, but we'll do in a followup
@@ -650,6 +648,7 @@ class Environment(BaseModel):
650
648
  new_persisted_concept,
651
649
  meta=meta,
652
650
  )
651
+
653
652
  return datasource
654
653
 
655
654
  def delete_datasource(
@@ -224,6 +224,9 @@ def initialize_loop_context(
224
224
  else:
225
225
 
226
226
  completion_mandatory = mandatory_list
227
+ logger.info(
228
+ f"{depth_to_prefix(depth)}{LOGGER_PREFIX} Initialized loop context with mandatory list {[c.address for c in mandatory_list]} and completion mandatory {[c.address for c in completion_mandatory]}"
229
+ )
227
230
  return LoopContext(
228
231
  mandatory_list=mandatory_list,
229
232
  environment=environment,
@@ -330,7 +333,7 @@ def check_for_early_exit(
330
333
  return False
331
334
 
332
335
 
333
- def generate_loop_completion(context: LoopContext, virtual) -> StrategyNode:
336
+ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> StrategyNode:
334
337
  condition_required = True
335
338
  non_virtual = [c for c in context.completion_mandatory if c.address not in virtual]
336
339
  non_virtual_output = [
@@ -367,10 +370,15 @@ def generate_loop_completion(context: LoopContext, virtual) -> StrategyNode:
367
370
  output: StrategyNode = context.stack[0]
368
371
  if non_virtual_different:
369
372
  logger.info(
370
- f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values"
373
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Found different non-virtual output concepts ({non_virtual_difference_values}), removing condition injected values by setting outputs to {[x.address for x in output.output_concepts if x.address in non_virtual_output]}"
371
374
  )
372
375
  output.set_output_concepts(
373
- [x for x in output.output_concepts if x.address in non_virtual_output],
376
+ [
377
+ x
378
+ for x in output.output_concepts
379
+ if x.address in non_virtual_output
380
+ or any(c in non_virtual_output for c in x.pseudonyms)
381
+ ],
374
382
  rebuild=False,
375
383
  )
376
384
 
@@ -404,7 +412,7 @@ def generate_loop_completion(context: LoopContext, virtual) -> StrategyNode:
404
412
  elif context.conditions:
405
413
  output.preexisting_conditions = context.conditions.conditional
406
414
  logger.info(
407
- f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node partial {[c.address for c in output.partial_concepts]} with {context.conditions}"
415
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Graph is connected, returning {type(output)} node output {[x.address for x in output.usable_outputs]} partial {[c.address for c in output.partial_concepts]} with {context.conditions}"
408
416
  )
409
417
  if condition_required and context.conditions and non_virtual_different:
410
418
  logger.info(
@@ -419,7 +427,7 @@ def generate_loop_completion(context: LoopContext, virtual) -> StrategyNode:
419
427
  )
420
428
  if result.required:
421
429
  logger.info(
422
- f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Adding group node"
430
+ f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Adding group node with outputs {[x.address for x in context.original_mandatory]}"
423
431
  )
424
432
  return GroupNode(
425
433
  output_concepts=context.original_mandatory,
@@ -466,6 +474,7 @@ def _search_concepts(
466
474
  )
467
475
 
468
476
  while context.incomplete:
477
+
469
478
  priority_concept = get_priority_concept(
470
479
  context.mandatory_list,
471
480
  context.attempted,
@@ -478,7 +487,7 @@ def _search_concepts(
478
487
  candidates = [
479
488
  c for c in context.mandatory_list if c.address != priority_concept.address
480
489
  ]
481
- # the local conditions list may be override if we end up injecting conditions
490
+ # the local conditions list may be overriden if we end up injecting conditions
482
491
  candidate_list, local_conditions = generate_candidates_restrictive(
483
492
  priority_concept,
484
493
  candidates,
@@ -351,7 +351,10 @@ class RootNodeHandler:
351
351
  extra = restrict_node_outputs_targets(expanded, root_targets, self.ctx.depth)
352
352
 
353
353
  pseudonyms = [
354
- x for x in extra if any(x.address in y.pseudonyms for y in root_targets)
354
+ x
355
+ for x in extra
356
+ if any(x.address in y.pseudonyms for y in root_targets)
357
+ and x.address not in root_targets
355
358
  ]
356
359
 
357
360
  if pseudonyms:
@@ -1,7 +1,7 @@
1
1
  from typing import List
2
2
 
3
3
  from trilogy.constants import logger
4
- from trilogy.core.enums import Derivation, Granularity
4
+ from trilogy.core.enums import Derivation
5
5
  from trilogy.core.models.build import (
6
6
  BuildConcept,
7
7
  BuildRowsetItem,
@@ -52,14 +52,6 @@ def get_priority_concept(
52
52
  # pass_two = [c for c in all_concepts+filter_only if c.address not in attempted_addresses]
53
53
  for remaining_concept in (pass_one,):
54
54
  priority = (
55
- # find anything that needs no joins first, so we can exit early
56
- [
57
- c
58
- for c in remaining_concept
59
- if c.derivation == Derivation.CONSTANT
60
- and c.granularity == Granularity.SINGLE_ROW
61
- ]
62
- +
63
55
  # then multiselects to remove them from scope
64
56
  [c for c in remaining_concept if c.derivation == Derivation.MULTISELECT]
65
57
  +
@@ -34,6 +34,9 @@ def validate_concept(
34
34
  seen: set[str],
35
35
  environment: BuildEnvironment,
36
36
  ):
37
+ # logger.debug(
38
+ # f"Validating concept {concept.address} with accept_partial={accept_partial}"
39
+ # )
37
40
  found_map[str(node)].add(concept)
38
41
  seen.add(concept.address)
39
42
  if concept not in node.partial_concepts:
@@ -53,12 +56,21 @@ def validate_concept(
53
56
  found_map[str(node)].add(concept)
54
57
  for v_address in concept.pseudonyms:
55
58
  if v_address in seen:
56
- return
57
- v = environment.concepts[v_address]
59
+ continue
60
+ if v_address in environment.alias_origin_lookup:
61
+ # logger.debug(
62
+ # f"Found alias origin for {v_address}: {environment.alias_origin_lookup[v_address]} mapped to {environment.concepts[v_address]}")
63
+ v = environment.alias_origin_lookup[v_address]
64
+ else:
65
+ v = environment.concepts[v_address]
66
+
58
67
  if v.address in seen:
59
- return
68
+
69
+ continue
70
+
60
71
  if v.address == concept.address:
61
- return
72
+
73
+ continue
62
74
  validate_concept(
63
75
  v,
64
76
  node,
@@ -93,6 +105,7 @@ def validate_stack(
93
105
 
94
106
  for concept in resolved.output_concepts:
95
107
  if concept.address in resolved.hidden_concepts:
108
+
96
109
  continue
97
110
 
98
111
  validate_concept(
@@ -57,7 +57,13 @@ def build_parent_concepts(
57
57
  local_optional: List[BuildConcept],
58
58
  conditions: BuildWhereClause | None = None,
59
59
  depth: int = 0,
60
- ):
60
+ ) -> tuple[
61
+ list[BuildConcept],
62
+ list[tuple[BuildConcept, ...]],
63
+ list[BuildConcept],
64
+ bool,
65
+ bool,
66
+ ]:
61
67
  parent_row_concepts, parent_existence_concepts = resolve_filter_parent_concepts(
62
68
  concept, environment
63
69
  )
@@ -66,6 +72,10 @@ def build_parent_concepts(
66
72
  filter_where = concept.lineage.where
67
73
 
68
74
  same_filter_optional: list[BuildConcept] = []
75
+ # mypy struggled here? we shouldn't need explicit bools
76
+ global_filter_is_local_filter: bool = (
77
+ True if (conditions and conditions == filter_where) else False
78
+ )
69
79
 
70
80
  for x in local_optional:
71
81
  if isinstance(x.lineage, FILTER_TYPES):
@@ -79,7 +89,7 @@ def build_parent_concepts(
79
89
  parent_row_concepts.append(arg)
80
90
  same_filter_optional.append(x)
81
91
  continue
82
- elif conditions and conditions == filter_where:
92
+ elif global_filter_is_local_filter:
83
93
  same_filter_optional.append(x)
84
94
 
85
95
  # sometimes, it's okay to include other local optional above the filter
@@ -100,6 +110,7 @@ def build_parent_concepts(
100
110
  parent_existence_concepts,
101
111
  same_filter_optional,
102
112
  is_optimized_pushdown,
113
+ global_filter_is_local_filter,
103
114
  )
104
115
 
105
116
 
@@ -152,6 +163,7 @@ def gen_filter_node(
152
163
  parent_existence_concepts,
153
164
  same_filter_optional,
154
165
  optimized_pushdown,
166
+ global_filter_is_local_filter,
155
167
  ) = build_parent_concepts(
156
168
  concept,
157
169
  environment=environment,
@@ -187,7 +199,13 @@ def gen_filter_node(
187
199
  f"{padding(depth)}{LOGGER_PREFIX} filter node row parents {[x.address for x in parent_row_concepts]} could not be found"
188
200
  )
189
201
  return None
190
-
202
+ if global_filter_is_local_filter:
203
+ logger.info(
204
+ f"{padding(depth)}{LOGGER_PREFIX} filter node conditions match global conditions adding row parent {row_parent.output_concepts} with condition {where.conditional}"
205
+ )
206
+ row_parent.add_parents(core_parent_nodes)
207
+ row_parent.set_output_concepts([concept] + local_optional)
208
+ return row_parent
191
209
  if optimized_pushdown:
192
210
  logger.info(
193
211
  f"{padding(depth)}{LOGGER_PREFIX} returning optimized filter node with pushdown to parent with condition {where.conditional} across {[concept] + same_filter_optional + row_parent.output_concepts} "
@@ -211,7 +229,8 @@ def gen_filter_node(
211
229
  parent = row_parent
212
230
  parent.add_output_concepts([concept] + same_filter_optional)
213
231
  parent.add_parents(core_parent_nodes)
214
- parent.add_condition(where.conditional)
232
+ if not parent.preexisting_conditions == where.conditional:
233
+ parent.add_condition(where.conditional)
215
234
  parent.add_existence_concepts(flattened_existence, False)
216
235
  parent.grain = BuildGrain.from_concepts(
217
236
  parent.output_concepts,
@@ -225,7 +244,8 @@ def gen_filter_node(
225
244
  parents_for_grain = [
226
245
  x.lineage.content
227
246
  for x in filters
228
- if isinstance(x.lineage.content, BuildConcept)
247
+ if isinstance(x.lineage, BuildFilterItem)
248
+ and isinstance(x.lineage.content, BuildConcept)
229
249
  ]
230
250
  filter_node = FilterNode(
231
251
  input_concepts=unique(
@@ -133,7 +133,7 @@ def gen_group_node(
133
133
  )
134
134
  return group_node
135
135
  missing_optional = [
136
- x.address for x in local_optional if x.address not in group_node.output_concepts
136
+ x.address for x in local_optional if x.address not in group_node.usable_outputs
137
137
  ]
138
138
  if not missing_optional:
139
139
  logger.info(
@@ -42,6 +42,9 @@ def extract_concept(node: str, env: BuildEnvironment):
42
42
 
43
43
  def filter_unique_graphs(graphs: list[list[str]]) -> list[list[str]]:
44
44
  unique_graphs: list[set[str]] = []
45
+
46
+ # sort graphs from largest to smallest
47
+ graphs.sort(key=lambda x: len(x), reverse=True)
45
48
  for graph in graphs:
46
49
  if not any(set(graph).issubset(x) for x in unique_graphs):
47
50
  unique_graphs.append(set(graph))
@@ -110,12 +113,13 @@ def determine_induced_minimal_nodes(
110
113
 
111
114
  try:
112
115
  paths = nx.multi_source_dijkstra_path(H, nodelist)
113
- except nx.exception.NodeNotFound as e:
114
- logger.debug(f"Unable to find paths for {nodelist}- {str(e)}")
116
+ # logger.debug(f"Paths found for {nodelist}")
117
+ except nx.exception.NodeNotFound:
118
+ # logger.debug(f"Unable to find paths for {nodelist}- {str(e)}")
115
119
  return None
116
120
  H.remove_nodes_from(list(x for x in H.nodes if x not in paths))
117
121
  sG: nx.Graph = ax.steinertree.steiner_tree(H, nodelist).copy()
118
- logger.debug("Steiner tree found for nodes %s", nodelist)
122
+ # logger.debug(f"Steiner tree found for nodes {nodelist} {sG.nodes}")
119
123
  final: nx.DiGraph = nx.subgraph(G, sG.nodes).copy()
120
124
 
121
125
  for edge in G.edges:
@@ -154,12 +158,31 @@ def determine_induced_minimal_nodes(
154
158
  return final
155
159
 
156
160
 
161
+ def canonicalize_addresses(
162
+ reduced_concept_set: set[str], environment: BuildEnvironment
163
+ ) -> set[str]:
164
+ """
165
+ Convert a set of concept addresses to their canonical form.
166
+ This is necessary to ensure that we can compare concepts correctly,
167
+ especially when dealing with aliases or pseudonyms.
168
+ """
169
+ return set(
170
+ environment.concepts[x].address if x in environment.concepts else x
171
+ for x in reduced_concept_set
172
+ )
173
+
174
+
157
175
  def detect_ambiguity_and_raise(
158
- all_concepts: list[BuildConcept], reduced_concept_sets: list[set[str]]
176
+ all_concepts: list[BuildConcept],
177
+ reduced_concept_sets_raw: list[set[str]],
178
+ environment: BuildEnvironment,
159
179
  ) -> None:
160
180
  final_candidates: list[set[str]] = []
161
181
  common: set[str] = set()
162
182
  # find all values that show up in every join_additions
183
+ reduced_concept_sets = [
184
+ canonicalize_addresses(x, environment) for x in reduced_concept_sets_raw
185
+ ]
163
186
  for ja in reduced_concept_sets:
164
187
  if not common:
165
188
  common = ja
@@ -198,18 +221,21 @@ def filter_relevant_subgraphs(
198
221
 
199
222
 
200
223
  def filter_duplicate_subgraphs(
201
- subgraphs: list[list[BuildConcept]],
224
+ subgraphs: list[list[BuildConcept]], environment
202
225
  ) -> list[list[BuildConcept]]:
203
226
  seen: list[set[str]] = []
204
227
 
205
228
  for graph in subgraphs:
206
- seen.append(set([x.address for x in graph]))
229
+ seen.append(
230
+ canonicalize_addresses(set([x.address for x in graph]), environment)
231
+ )
207
232
  final = []
208
233
  # sometimes w can get two subcomponents that are the same
209
234
  # due to alias resolution
210
235
  # if so, drop any that are strict subsets.
211
236
  for graph in subgraphs:
212
- set_x = set([x.address for x in graph])
237
+ logger.info(f"Checking graph {graph} for duplicates in {seen}")
238
+ set_x = canonicalize_addresses(set([x.address for x in graph]), environment)
213
239
  if any([set_x.issubset(y) and set_x != y for y in seen]):
214
240
  continue
215
241
  final.append(graph)
@@ -295,7 +321,7 @@ def resolve_weak_components(
295
321
  if not found:
296
322
  return None
297
323
 
298
- detect_ambiguity_and_raise(all_concepts, reduced_concept_sets)
324
+ detect_ambiguity_and_raise(all_concepts, reduced_concept_sets, environment)
299
325
 
300
326
  # take our first one as the actual graph
301
327
  g = found[0]
@@ -316,7 +342,7 @@ def resolve_weak_components(
316
342
  if not sub_component:
317
343
  continue
318
344
  subgraphs.append(sub_component)
319
- final = filter_duplicate_subgraphs(subgraphs)
345
+ final = filter_duplicate_subgraphs(subgraphs, environment)
320
346
  return final
321
347
  # return filter_relevant_subgraphs(subgraphs)
322
348
 
@@ -361,17 +387,25 @@ def subgraphs_to_merge_node(
361
387
  )
362
388
  parents.append(parent)
363
389
  input_c = []
390
+ output_c = []
364
391
  for x in parents:
365
392
  for y in x.usable_outputs:
366
393
  input_c.append(y)
394
+ if y in output_concepts:
395
+ output_c.append(y)
396
+ elif any(y.address in c.pseudonyms for c in output_concepts) or any(
397
+ c.address in y.pseudonyms for c in output_concepts
398
+ ):
399
+ output_c.append(y)
400
+
367
401
  if len(parents) == 1 and enable_early_exit:
368
402
  logger.info(
369
403
  f"{padding(depth)}{LOGGER_PREFIX} only one parent node, exiting early w/ {[c.address for c in parents[0].output_concepts]}"
370
404
  )
371
405
  return parents[0]
372
- return MergeNode(
406
+ rval = MergeNode(
373
407
  input_concepts=unique(input_c, "address"),
374
- output_concepts=output_concepts,
408
+ output_concepts=output_c,
375
409
  environment=environment,
376
410
  parents=parents,
377
411
  depth=depth,
@@ -381,6 +415,7 @@ def subgraphs_to_merge_node(
381
415
  # preexisting_conditions=search_conditions.conditional,
382
416
  # node_joins=[]
383
417
  )
418
+ return rval
384
419
 
385
420
 
386
421
  def gen_merge_node(
@@ -437,24 +472,4 @@ def gen_merge_node(
437
472
  search_conditions=search_conditions,
438
473
  output_concepts=all_concepts,
439
474
  )
440
-
441
- # one concept handling may need to be kicked to alias
442
- if len(all_search_concepts) == 1:
443
- concept = all_search_concepts[0]
444
- for v in concept.pseudonyms:
445
- test = subgraphs_to_merge_node(
446
- [[concept, environment.alias_origin_lookup[v]]],
447
- g=g,
448
- all_concepts=[concept],
449
- environment=environment,
450
- depth=depth,
451
- source_concepts=source_concepts,
452
- history=history,
453
- conditions=conditions,
454
- enable_early_exit=False,
455
- search_conditions=search_conditions,
456
- output_concepts=[concept],
457
- )
458
- if test:
459
- return test
460
475
  return None
@@ -85,9 +85,9 @@ def subgraph_is_complete(
85
85
  mapped = set([mapping.get(n, n) for n in nodes])
86
86
  passed = all([t in mapped for t in targets])
87
87
  if not passed:
88
- logger.info(
89
- f"Subgraph {nodes} is not complete, missing targets {targets} - mapped {mapped}"
90
- )
88
+ # logger.info(
89
+ # f"Subgraph {nodes} is not complete, missing targets {targets} - mapped {mapped}"
90
+ # )
91
91
  return False
92
92
  # check if all concepts have a datasource edge
93
93
  has_ds_edge = {
@@ -346,7 +346,7 @@ def create_datasource_node(
346
346
  depth: int,
347
347
  conditions: BuildWhereClause | None = None,
348
348
  ) -> tuple[StrategyNode, bool]:
349
- logger.info(all_concepts)
349
+
350
350
  target_grain = BuildGrain.from_concepts(all_concepts, environment=environment)
351
351
  force_group = False
352
352
  if not datasource.grain.issubset(target_grain):
@@ -377,26 +377,26 @@ def create_datasource_node(
377
377
  partial_is_full = conditions and (conditions == datasource.non_partial_for)
378
378
 
379
379
  datasource_conditions = datasource.where.conditional if datasource.where else None
380
-
381
- return (
382
- SelectNode(
383
- input_concepts=[c.concept for c in datasource.columns],
384
- output_concepts=all_concepts,
385
- environment=environment,
386
- parents=[],
387
- depth=depth,
388
- partial_concepts=(
389
- [] if partial_is_full else [c for c in all_concepts if c in partial_lcl]
390
- ),
391
- nullable_concepts=[c for c in all_concepts if c in nullable_lcl],
392
- accept_partial=accept_partial,
393
- datasource=datasource,
394
- grain=datasource.grain,
395
- conditions=datasource_conditions,
396
- preexisting_conditions=(
397
- conditions.conditional if partial_is_full and conditions else None
398
- ),
380
+ rval = SelectNode(
381
+ input_concepts=[c.concept for c in datasource.columns],
382
+ output_concepts=all_concepts,
383
+ environment=environment,
384
+ parents=[],
385
+ depth=depth,
386
+ partial_concepts=(
387
+ [] if partial_is_full else [c for c in all_concepts if c in partial_lcl]
388
+ ),
389
+ nullable_concepts=[c for c in all_concepts if c in nullable_lcl],
390
+ accept_partial=accept_partial,
391
+ datasource=datasource,
392
+ grain=datasource.grain,
393
+ conditions=datasource_conditions,
394
+ preexisting_conditions=(
395
+ conditions.conditional if partial_is_full and conditions else None
399
396
  ),
397
+ )
398
+ return (
399
+ rval,
400
400
  force_group,
401
401
  )
402
402
 
@@ -484,7 +484,7 @@ def create_select_node(
484
484
  input_concepts=all_concepts,
485
485
  environment=environment,
486
486
  parents=[bcandidate],
487
- depth=depth,
487
+ depth=depth + 1,
488
488
  partial_concepts=bcandidate.partial_concepts,
489
489
  nullable_concepts=bcandidate.nullable_concepts,
490
490
  preexisting_conditions=bcandidate.preexisting_conditions,
@@ -493,6 +493,7 @@ def create_select_node(
493
493
  else:
494
494
 
495
495
  candidate = bcandidate
496
+ assert candidate.resolve().output_concepts == all_concepts
496
497
  return candidate
497
498
 
498
499
 
@@ -604,6 +605,7 @@ def gen_select_merge_node(
604
605
  ]
605
606
  ):
606
607
  preexisting_conditions = conditions.conditional
608
+
607
609
  base = MergeNode(
608
610
  output_concepts=all_concepts,
609
611
  input_concepts=non_constant,
@@ -18,6 +18,43 @@ from trilogy.core.processing.utility import padding
18
18
  LOGGER_PREFIX = "[GEN_SELECT_NODE]"
19
19
 
20
20
 
21
+ def validate_query_is_resolvable(
22
+ missing: list[str],
23
+ environment: BuildEnvironment,
24
+ materialized_lcl: LooseBuildConceptList,
25
+ ) -> None:
26
+ # if a query cannot ever be resolved, exit early with an error
27
+ for x in missing:
28
+ if x not in environment.concepts:
29
+ # if it's locally derived, we can assume it can be resolved
30
+ continue
31
+ validation_concept = environment.concepts[x]
32
+ # if the concept we look up isn't what we searched for,
33
+ # we're in a pseudonym anyway, don't worry about validating
34
+ if validation_concept.address != x:
35
+ continue
36
+ if validation_concept.derivation == Derivation.ROOT:
37
+ has_source = False
38
+ for x in validation_concept.pseudonyms:
39
+ if x in environment.alias_origin_lookup:
40
+ pseudonym_concept = environment.alias_origin_lookup[x]
41
+ else:
42
+ pseudonym_concept = environment.concepts[x]
43
+ # if it's not a root concept pseudonym,
44
+ # assume we can derivve it
45
+ if pseudonym_concept.derivation != Derivation.ROOT:
46
+ has_source = True
47
+ break
48
+ if pseudonym_concept.address in materialized_lcl:
49
+ has_source = True
50
+ break
51
+ if not has_source:
52
+ raise NoDatasourceException(
53
+ f"No datasource exists for root concept {validation_concept}, and no resolvable pseudonyms found from {validation_concept.pseudonyms}. This query is unresolvable from your environment. Check your datasource configuration?"
54
+ )
55
+ return None
56
+
57
+
21
58
  def gen_select_node(
22
59
  concepts: list[BuildConcept],
23
60
  environment: BuildEnvironment,
@@ -42,6 +79,7 @@ def gen_select_node(
42
79
  f"{padding(depth)}{LOGGER_PREFIX} Skipping select node generation for {concepts}"
43
80
  f" as it + optional includes non-materialized concepts (looking for all {all_lcl}, missing {missing}) "
44
81
  )
82
+ validate_query_is_resolvable(missing, environment, materialized_lcl)
45
83
  if fail_if_not_found:
46
84
  raise NoDatasourceException(f"No datasource exists for {concepts}")
47
85
  return None
@@ -30,22 +30,27 @@ def gen_synonym_node(
30
30
  accept_partial: bool = False,
31
31
  ) -> StrategyNode | None:
32
32
  local_prefix = f"{padding(depth)}[GEN_SYNONYM_NODE]"
33
- base_fingerprint = tuple([x.address for x in all_concepts])
33
+ base_fingerprint = tuple(sorted([x.address for x in all_concepts]))
34
34
  synonyms = defaultdict(list)
35
- synonym_count = 0
35
+ has_synonyms = False
36
36
  for x in all_concepts:
37
37
  synonyms[x.address] = [x]
38
+ if x.address in environment.alias_origin_lookup:
39
+ parent = environment.concepts[x.address]
40
+ if parent.address != x.address:
41
+ synonyms[x.address].append(parent)
42
+ has_synonyms = True
38
43
  for y in x.pseudonyms:
39
44
 
40
45
  if y in environment.alias_origin_lookup:
41
46
  synonyms[x.address].append(environment.alias_origin_lookup[y])
42
- synonym_count += 1
47
+ has_synonyms = True
43
48
  elif y in environment.concepts:
44
49
  synonyms[x.address].append(environment.concepts[y])
45
- synonym_count += 1
50
+ has_synonyms = True
46
51
  for address in synonyms:
47
52
  synonyms[address].sort(key=lambda obj: obj.address)
48
- if synonym_count == 0:
53
+ if not has_synonyms:
49
54
  return None
50
55
 
51
56
  logger.info(f"{local_prefix} Generating Synonym Node with {len(synonyms)} synonyms")
@@ -76,9 +81,8 @@ def gen_synonym_node(
76
81
  return (-similarity_score, addresses)
77
82
 
78
83
  combinations_list.sort(key=similarity_sort_key)
79
- logger.info(combinations_list)
80
84
  for combo in combinations_list:
81
- fingerprint = tuple([x.address for x in combo])
85
+ fingerprint = tuple(sorted([x.address for x in combo]))
82
86
  if fingerprint == base_fingerprint:
83
87
  continue
84
88
  logger.info(
@@ -29,6 +29,7 @@ def resolve_concept_map(
29
29
  inherited_inputs: List[BuildConcept],
30
30
  full_joins: List[BuildConcept] | None = None,
31
31
  ) -> dict[str, set[BuildDatasource | QueryDatasource | UnnestJoin]]:
32
+
32
33
  targets = targets or []
33
34
  concept_map: dict[str, set[BuildDatasource | QueryDatasource | UnnestJoin]] = (
34
35
  defaultdict(set)
@@ -56,7 +57,9 @@ def resolve_concept_map(
56
57
  # second loop, include partials
57
58
  for input in inputs:
58
59
  for concept in input.output_concepts:
59
- if concept.address not in [t for t in inherited_inputs]:
60
+ if concept.address not in inherited and not (
61
+ concept.pseudonyms and any(s in inherited for s in concept.pseudonyms)
62
+ ):
60
63
  continue
61
64
  if (
62
65
  isinstance(input, QueryDatasource)
@@ -70,7 +73,6 @@ def resolve_concept_map(
70
73
  if target.address not in inherited:
71
74
  # an empty source means it is defined in this CTE
72
75
  concept_map[target.address] = set()
73
-
74
76
  return concept_map
75
77
 
76
78
 
@@ -195,8 +197,10 @@ class StrategyNode:
195
197
  return
196
198
  non_hidden = set()
197
199
  hidden = set()
200
+ usable_outputs = set()
198
201
  for x in self.parents:
199
202
  for z in x.usable_outputs:
203
+ usable_outputs.add(z.address)
200
204
  non_hidden.add(z.address)
201
205
  for psd in z.pseudonyms:
202
206
  non_hidden.add(psd)
@@ -205,7 +209,7 @@ class StrategyNode:
205
209
  if not all([x.address in non_hidden for x in self.input_concepts]):
206
210
  missing = [x for x in self.input_concepts if x.address not in non_hidden]
207
211
  raise ValueError(
208
- f"Invalid input concepts; {missing} are missing non-hidden parent nodes; have {non_hidden} and hidden {hidden}"
212
+ f"Invalid input concepts; {missing} are missing non-hidden parent nodes; have {non_hidden} and hidden {hidden} from root {usable_outputs}"
209
213
  )
210
214
 
211
215
  def add_parents(self, parents: list["StrategyNode"]):
@@ -282,6 +286,11 @@ class StrategyNode:
282
286
  if self.output_concepts == concepts:
283
287
  return self
284
288
  self.output_concepts = concepts
289
+ if self.hidden_concepts:
290
+ self.hidden_concepts = set(
291
+ x for x in self.hidden_concepts if x not in concepts
292
+ )
293
+
285
294
  self.output_lcl = LooseBuildConceptList(concepts=self.output_concepts)
286
295
 
287
296
  if rebuild:
@@ -165,7 +165,7 @@ class GroupNode(StrategyNode):
165
165
  p.resolve() for p in self.parents
166
166
  ]
167
167
  grains = self.check_if_required(
168
- self.output_concepts, parent_sources, self.environment
168
+ self.output_concepts, parent_sources, self.environment, self.depth
169
169
  )
170
170
  target_grain = grains.target
171
171
  comp_grain = grains.upstream
@@ -395,6 +395,9 @@ def get_query_node(
395
395
  if not statement.output_components:
396
396
  raise ValueError(f"Statement has no output components {statement}")
397
397
  history = history or History(base_environment=environment)
398
+ print(
399
+ f"{LOGGER_PREFIX} building query node for {statement.output_components} grain {statement.grain}"
400
+ )
398
401
  build_statement: BuildSelectLineage | BuildMultiSelectLineage = Factory(
399
402
  environment=environment,
400
403
  ).build(statement)
@@ -404,6 +407,7 @@ def get_query_node(
404
407
  build_statement.local_concepts
405
408
  )
406
409
  graph = generate_graph(build_environment)
410
+
407
411
  logger.info(
408
412
  f"{LOGGER_PREFIX} getting source datasource for outputs {build_statement.output_components} grain {build_statement.grain}"
409
413
  )
trilogy/dialect/base.py CHANGED
@@ -843,7 +843,7 @@ class BaseDialect:
843
843
  else:
844
844
  having = having + x if having else x
845
845
 
846
- logger.info(f"{len(final_joins)} joins for cte {cte.name}")
846
+ logger.info(f"{LOGGER_PREFIX} {len(final_joins)} joins for cte {cte.name}")
847
847
  return CompiledCTE(
848
848
  name=cte.name,
849
849
  statement=self.SQL_TEMPLATE.render(
trilogy/parsing/common.py CHANGED
@@ -23,9 +23,13 @@ from trilogy.core.models.author import (
23
23
  AggregateWrapper,
24
24
  AlignClause,
25
25
  AlignItem,
26
+ CaseElse,
27
+ CaseWhen,
28
+ Comparison,
26
29
  Concept,
27
30
  ConceptArgs,
28
31
  ConceptRef,
32
+ Conditional,
29
33
  FilterItem,
30
34
  Function,
31
35
  FunctionCallWrapper,
@@ -38,6 +42,7 @@ from trilogy.core.models.author import (
38
42
  Parenthetical,
39
43
  RowsetItem,
40
44
  RowsetLineage,
45
+ SubselectComparison,
41
46
  TraitDataType,
42
47
  UndefinedConcept,
43
48
  WhereClause,
@@ -198,6 +203,7 @@ def atom_is_relevant(
198
203
  others: list[Concept | ConceptRef],
199
204
  environment: Environment | None = None,
200
205
  ):
206
+
201
207
  if isinstance(atom, (ConceptRef, Concept)):
202
208
  # when we are looking at atoms, if there is a concept that is in others
203
209
  # return directly
@@ -210,9 +216,10 @@ def atom_is_relevant(
210
216
  elif isinstance(atom, AggregateWrapper):
211
217
  return any(atom_is_relevant(x, others, environment) for x in atom.by)
212
218
 
213
- if isinstance(atom, Function):
219
+ elif isinstance(atom, Function):
214
220
  relevant = False
215
221
  for arg in atom.arguments:
222
+
216
223
  relevant = relevant or atom_is_relevant(arg, others, environment)
217
224
  return relevant
218
225
  elif isinstance(atom, FunctionCallWrapper):
@@ -220,8 +227,27 @@ def atom_is_relevant(
220
227
  [atom_is_relevant(atom.content, others, environment)]
221
228
  + [atom_is_relevant(x, others, environment) for x in atom.args]
222
229
  )
230
+ elif isinstance(atom, CaseWhen):
231
+ rval = atom_is_relevant(atom.expr, others, environment) or atom_is_relevant(
232
+ atom.comparison, others, environment
233
+ )
234
+ return rval
235
+ elif isinstance(atom, CaseElse):
236
+
237
+ rval = atom_is_relevant(atom.expr, others, environment)
238
+ return rval
239
+ elif isinstance(atom, SubselectComparison):
240
+ return atom_is_relevant(atom.left, others, environment)
241
+ elif isinstance(atom, Comparison):
242
+ return atom_is_relevant(atom.left, others, environment) or atom_is_relevant(
243
+ atom.right, others, environment
244
+ )
245
+ elif isinstance(atom, Conditional):
246
+ return atom_is_relevant(atom.left, others, environment) or atom_is_relevant(
247
+ atom.right, others, environment
248
+ )
223
249
  elif isinstance(atom, ConceptArgs):
224
- # use atom is relevant here to trigger the early exit behavior for concpets in set
250
+ # use atom is relevant here to trigger the early exit behavior for concepts in set
225
251
  return any(
226
252
  [atom_is_relevant(x, others, environment) for x in atom.concept_arguments]
227
253
  )
@@ -233,7 +259,6 @@ def concept_is_relevant(
233
259
  others: list[Concept | ConceptRef],
234
260
  environment: Environment | None = None,
235
261
  ) -> bool:
236
-
237
262
  if isinstance(concept, UndefinedConcept):
238
263
  return False
239
264
  if concept.datatype == DataType.UNKNOWN:
trilogy/std/display.preql CHANGED
@@ -1,3 +1,6 @@
1
1
 
2
2
 
3
- type percent float; # Percentage value
3
+ type percent float; # Percentage value
4
+
5
+ def calc_percent(a, b, digits=-1) -> case when digits =-1 then (a/b):: float::percent
6
+ else round((a/b):: float::percent, digits) end;