pytrilogy 0.0.3.99__py3-none-any.whl → 0.0.3.101__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic. Click here for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.99
3
+ Version: 0.0.3.101
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -123,15 +123,12 @@ Versus SQL, Trilogy aims to:
123
123
  **Improve:**
124
124
  - Simplicity
125
125
  - Refactoring/maintainability
126
- - Reusability
126
+ - Reusability/composability
127
+ - Expressiveness
127
128
 
128
129
  **Maintain:**
129
130
  - Acceptable performance
130
131
 
131
- **Remove:**
132
- - Lower-level procedural features
133
- - Transactional optimizations/non-analytics features
134
-
135
132
  ## Backend Support
136
133
 
137
134
  | Backend | Status | Notes |
@@ -161,6 +158,7 @@ property sentence_id.word_three string::word; # a description to it
161
158
  # comments in other places are just comments
162
159
 
163
160
  # define our datasource to bind the model to data
161
+ # for most work, you can import something already defined
164
162
  # testing using query fixtures is a common pattern
165
163
  datasource word_one(
166
164
  sentence: sentence_id,
@@ -323,7 +321,7 @@ from pytrilogy import Executor, Dialect
323
321
 
324
322
  ### Authoring Imports
325
323
 
326
- Are also stable, and should be used for cases which programatically generate Trilogy statements without a base text format
324
+ Are also stable, and should be used for cases which programmatically generate Trilogy statements without text inputs
327
325
  or need to process/transform parsed code in more complicated ways.
328
326
 
329
327
  ```python
@@ -391,6 +389,16 @@ datasource <name>(
391
389
  )
392
390
  grain(<concept>, <concept>)
393
391
  address <table>;
392
+
393
+ datasource orders(
394
+ order_id,
395
+ order_date,
396
+ total_rev: point_of_sale_rev,
397
+ customer_id: customer.id
398
+ )
399
+ grain orders
400
+ address orders;
401
+
394
402
  ```
395
403
 
396
404
  ### Queries
@@ -1,6 +1,6 @@
1
- pytrilogy-0.0.3.99.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
2
- trilogy/__init__.py,sha256=_mu-EP3oMYeyuDb0ESiaTOvq4vYDFOgPukUj0Nk4pwI,303
3
- trilogy/constants.py,sha256=SSsRMg9HTou259nMKAw-rJNBgzkWjQ3QIQXcrq9i5Kk,1717
1
+ pytrilogy-0.0.3.101.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
2
+ trilogy/__init__.py,sha256=NMiEE_jE99ZiREk8IPjfT2M-jxAwtmd2vyCWVD3kT28,304
3
+ trilogy/constants.py,sha256=ohmro6so7PPNp2ruWQKVc0ijjXYPOyRrxB9LI8dr3TU,1746
4
4
  trilogy/engine.py,sha256=3MiADf5MKcmxqiHBuRqiYdsXiLj7oitDfVvXvHrfjkA,2178
5
5
  trilogy/executor.py,sha256=KgCAQhHPT-j0rPkBbALX0f84W9-Q-bkjHayGuavg99w,16490
6
6
  trilogy/parser.py,sha256=o4cfk3j3yhUFoiDKq9ZX_GjBF3dKhDjXEwb63rcBkBM,293
@@ -14,8 +14,8 @@ trilogy/core/enums.py,sha256=H8I2Dz4POHZ4ixYCGzNs4c3KDqxLQklGLVfmje1DSMo,8877
14
14
  trilogy/core/env_processor.py,sha256=H-rr2ALj31l5oh3FqeI47Qju6OOfiXBacXNJGNZ92zQ,4521
15
15
  trilogy/core/environment_helpers.py,sha256=TRlqVctqIRBxzfjRBmpQsAVoiCcsEKBhG1B6PUE0l1M,12743
16
16
  trilogy/core/ergonomics.py,sha256=e-7gE29vPLFdg0_A1smQ7eOrUwKl5VYdxRSTddHweRA,1631
17
- trilogy/core/exceptions.py,sha256=fI16oTNCVMMAJFSn2AFzZVapzsF5M9WbdN5e5UixwXc,2807
18
- trilogy/core/functions.py,sha256=oY-F0hsA9vp1ZipGTyx4QVtz_x83Ekk-lkHv6mMkHVQ,33095
17
+ trilogy/core/exceptions.py,sha256=axkVXYJYQXCCwMHwlyDA232g4tCOwdCZUt7eHeUMDMg,2829
18
+ trilogy/core/functions.py,sha256=sdV6Z3NUVfwL1d18eNcaAXllVNqzLez23McsJ6xIp7M,33182
19
19
  trilogy/core/graph_models.py,sha256=4EWFTHGfYd72zvS2HYoV6hm7nMC_VEd7vWr6txY-ig0,3400
20
20
  trilogy/core/internal.py,sha256=r9QagDB2GvpqlyD_I7VrsfbVfIk5mnok2znEbv72Aa4,2681
21
21
  trilogy/core/optimization.py,sha256=ojpn-p79lr03SSVQbbw74iPCyoYpDYBmj1dbZ3oXCjI,8860
@@ -25,48 +25,47 @@ trilogy/core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
25
25
  trilogy/core/models/author.py,sha256=ZSKEJ6Vg4otpI_m7_JuGyrFM8dZV1HaxBwprvDSwUzo,81149
26
26
  trilogy/core/models/build.py,sha256=ZwJJGyp4rVsISvL8Er_AxQdVJrafYc4fesSj4MNgoxU,70615
27
27
  trilogy/core/models/build_environment.py,sha256=mpx7MKGc60fnZLVdeLi2YSREy7eQbQYycCrP4zF-rHU,5258
28
- trilogy/core/models/core.py,sha256=EofJ8-kltNr_7oFhyCPqauVX1bSJzJI5xOp0eMP_vlA,12892
28
+ trilogy/core/models/core.py,sha256=iT9WdZoiXeglmUHWn6bZyXCTBpkApTGPKtNm_Mhbu_g,12987
29
29
  trilogy/core/models/datasource.py,sha256=wogTevZ-9CyUW2a8gjzqMCieircxi-J5lkI7EOAZnck,9596
30
30
  trilogy/core/models/environment.py,sha256=hwTIRnJgaHUdCYof7U5A9NPitGZ2s9yxqiW5O2SaJ9Y,28759
31
- trilogy/core/models/execute.py,sha256=lQTpiuNhBT4In-oQ76ImgIoTdUbs4mmyd0J0iTOZOdw,42105
31
+ trilogy/core/models/execute.py,sha256=lsNzNjS3nZvoW5CHjYwxDTwBe502NZyytpK1eq8CwW4,42357
32
32
  trilogy/core/optimizations/__init__.py,sha256=YH2-mGXZnVDnBcWVi8vTbrdw7Qs5TivG4h38rH3js_I,290
33
33
  trilogy/core/optimizations/base_optimization.py,sha256=gzDOKImoFn36k7XBD3ysEYDnbnb6vdVIztUfFQZsGnM,513
34
34
  trilogy/core/optimizations/inline_datasource.py,sha256=2sWNRpoRInnTgo9wExVT_r9RfLAQHI57reEV5cGHUcg,4329
35
35
  trilogy/core/optimizations/predicate_pushdown.py,sha256=g4AYE8Aw_iMlAh68TjNXGP754NTurrDduFECkUjoBnc,9399
36
36
  trilogy/core/processing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
37
- trilogy/core/processing/concept_strategies_v3.py,sha256=Izo7yfR6sGHTaD17lN7ZzGYCtXA5AkXAmIp_OBjHH58,23161
38
- trilogy/core/processing/discovery_loop.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
37
+ trilogy/core/processing/concept_strategies_v3.py,sha256=tvSN_aiqb1H7LkTl96vj7YK_DKcq_1nDdRJ69wZCLc8,22158
39
38
  trilogy/core/processing/discovery_node_factory.py,sha256=5QVYUsci_h6iYWhS0GCoDow2tSAipiBW1OyTRX-g_L8,15581
40
- trilogy/core/processing/discovery_utility.py,sha256=eY4n7_r6_R-cx_Sm8FiouMXh78v2iO2SGhi0aI5jvDg,4549
39
+ trilogy/core/processing/discovery_utility.py,sha256=Xntgug6VnEF96uw5Zwen1qMEUwKjqrm_ZDUr4i4tc1U,5595
41
40
  trilogy/core/processing/discovery_validation.py,sha256=eZ4HfHMpqZLI8MGG2jez8arS8THs6ceuVrQFIY6gXrU,5364
42
41
  trilogy/core/processing/graph_utils.py,sha256=8QUVrkE9j-9C1AyrCb1nQEh8daCe0u1HuXl-Te85lag,1205
43
42
  trilogy/core/processing/utility.py,sha256=PGQKZgX58kI3gG4nprY8HRGayc2D8fF5RmbvBhQj8ok,23319
44
43
  trilogy/core/processing/node_generators/__init__.py,sha256=iVJ-crowPxYeut-hFjyEjfibKIDq7PfB4LEuDAUCjGY,943
45
- trilogy/core/processing/node_generators/basic_node.py,sha256=TLZCv4WS196a-0g5xgKuJGthnGP8Ugm46iz85_3NIY4,5626
44
+ trilogy/core/processing/node_generators/basic_node.py,sha256=0Uhnf07056SBbRkt-wYLw4DZqsFR6jztGLUaE9ebPZs,5577
46
45
  trilogy/core/processing/node_generators/common.py,sha256=PdysdroW9DUADP7f5Wv_GKPUyCTROZV1g3L45fawxi8,9443
47
46
  trilogy/core/processing/node_generators/constant_node.py,sha256=LfpDq2WrBRZ3tGsLxw77LuigKfhbteWWh9L8BGdMGwk,1146
48
47
  trilogy/core/processing/node_generators/filter_node.py,sha256=ArBsQJl-4fWBJWCE28CRQ7UT7ErnFfbcseoQQZrBodY,11220
49
- trilogy/core/processing/node_generators/group_node.py,sha256=8HJ1lkOvIXfX3xoS2IMbM_wCu_mT0J_hQ7xnTaxsVlo,6611
48
+ trilogy/core/processing/node_generators/group_node.py,sha256=yqOWl5TCV4PrdJua4OJkPUIHkljaLoSW2Y8eRAmVddQ,6733
50
49
  trilogy/core/processing/node_generators/group_to_node.py,sha256=jKcNCDOY6fNblrdZwaRU0sbUSr9H0moQbAxrGgX6iGA,3832
51
50
  trilogy/core/processing/node_generators/multiselect_node.py,sha256=GWV5yLmKTe1yyPhN60RG1Rnrn4ktfn9lYYXi_FVU4UI,7061
52
- trilogy/core/processing/node_generators/node_merge_node.py,sha256=DfivU4xAo3iOFvuZxmztlD5AlusM_Nq0IOOgvPKMH_M,22964
51
+ trilogy/core/processing/node_generators/node_merge_node.py,sha256=1joMV7XpQ9Gpe-d5y7JUMBHIqakV5wFJi3Mtvs4UcL4,23415
53
52
  trilogy/core/processing/node_generators/recursive_node.py,sha256=l5zdh0dURKwmAy8kK4OpMtZfyUEQRk6N-PwSWIyBpSM,2468
54
53
  trilogy/core/processing/node_generators/rowset_node.py,sha256=5L5u6xz1In8EaHQdcYgR2si-tz9WB9YLXURo4AkUT9A,6630
55
54
  trilogy/core/processing/node_generators/select_merge_node.py,sha256=KQvGoNT5ZBWQ_caEomRTtG1PKZC7OPT4PKfY0QmwMGE,22270
56
55
  trilogy/core/processing/node_generators/select_node.py,sha256=Ta1G39V94gjX_AgyZDz9OqnwLz4BjY3D6Drx9YpziMQ,3555
57
56
  trilogy/core/processing/node_generators/synonym_node.py,sha256=AnAsa_Wj50NJ_IK0HSgab_7klYmKVrv0WI1uUe-GvEY,3766
58
57
  trilogy/core/processing/node_generators/union_node.py,sha256=VNo6Oey4p8etU9xrOh2oTT2lIOTvY6PULUPRvVa2uxU,2877
59
- trilogy/core/processing/node_generators/unnest_node.py,sha256=ueOQtoTf2iJHO09RzWHDFQ5iKZq2fVhGf2KAF2U2kU8,2677
58
+ trilogy/core/processing/node_generators/unnest_node.py,sha256=w9vhPzASz53QPASLqFcLDdR9eY132tgVUcp3QolD5Jw,3726
60
59
  trilogy/core/processing/node_generators/window_node.py,sha256=A90linr4pkZtTNfn9k2YNLqrJ_SFII3lbHxB-BC6mI8,6688
61
60
  trilogy/core/processing/node_generators/select_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
62
61
  trilogy/core/processing/node_generators/select_helpers/datasource_injection.py,sha256=m2YQ4OmG0N2O61a7NEq1ZzbTa7JsCC00lxB2ymjcYRI,8224
63
62
  trilogy/core/processing/nodes/__init__.py,sha256=zTge1EzwzEydlcMliIFO_TT7h7lS8l37lyZuQDir1h0,5487
64
63
  trilogy/core/processing/nodes/base_node.py,sha256=C_CjlOzlGMXckyV0b_PJZerpopNesRCKfambMq7Asvc,18221
65
64
  trilogy/core/processing/nodes/filter_node.py,sha256=5VtRfKbCORx0dV-vQfgy3gOEkmmscL9f31ExvlODwvY,2461
66
- trilogy/core/processing/nodes/group_node.py,sha256=ZJ9LRF1sDOyel5v0MHXHORn6DYdn771nX0-KdHdt3-4,10517
67
- trilogy/core/processing/nodes/merge_node.py,sha256=02oWRca0ba41U6PSAB14jwnWWxoyrvxRPLwkli259SY,15865
65
+ trilogy/core/processing/nodes/group_node.py,sha256=njz-5T7OJ3-kaBC7EhdtPra3G77HnI7apjUwMGhUeXo,10569
66
+ trilogy/core/processing/nodes/merge_node.py,sha256=daJywBxh44Gqk-7eTiXbYtY7xo6O6fNvqX-DagTOTmE,16231
68
67
  trilogy/core/processing/nodes/recursive_node.py,sha256=k0rizxR8KE64ievfHx_GPfQmU8QAP118Laeyq5BLUOk,1526
69
- trilogy/core/processing/nodes/select_node_v2.py,sha256=Xyfq8lU7rP7JTAd8VV0ATDNal64n4xIBgWQsOuMe_Ak,8824
68
+ trilogy/core/processing/nodes/select_node_v2.py,sha256=IWyKyNgFlV8A2S1FUTPdIaogg6PzaHh-HmQo6v24sbg,8862
70
69
  trilogy/core/processing/nodes/union_node.py,sha256=hLAXXVWqEgMWi7dlgSHfCF59fon64av14-uPgJzoKzM,1870
71
70
  trilogy/core/processing/nodes/unnest_node.py,sha256=oLKMMNMx6PLDPlt2V5neFMFrFWxET8r6XZElAhSNkO0,2181
72
71
  trilogy/core/processing/nodes/window_node.py,sha256=JXJ0iVRlSEM2IBr1TANym2RaUf_p5E_l2sNykRzXWDo,1710
@@ -78,11 +77,11 @@ trilogy/core/statements/execute.py,sha256=kiwJcVeMa4wZR-xLfM2oYOJ9DeyJkP8An38WFy
78
77
  trilogy/core/validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
79
78
  trilogy/core/validation/common.py,sha256=Sd-towAX1uSDe3dK51FcVtIwVrMhayEwdHqhzeJHro0,4776
80
79
  trilogy/core/validation/concept.py,sha256=PM2BxBxLvuBScSWZMPsDZVcOblDil5pNT0pHLcLhdPA,5242
81
- trilogy/core/validation/datasource.py,sha256=d9AQNcukIRgN2spItPsXFiNtlZva-lDnfei3i06yQCE,6489
80
+ trilogy/core/validation/datasource.py,sha256=nJeEFyb6iMBwlEVdYVy1vLzAbdRZwOsUjGxgWKgY8oM,7636
82
81
  trilogy/core/validation/environment.py,sha256=ymvhQyt7jLK641JAAIQkqjQaAmr9C5022ILzYvDgPP0,2835
83
82
  trilogy/core/validation/fix.py,sha256=Z818UFNLxndMTLiyhB3doLxIfnOZ-16QGvVFWuD7UsA,3750
84
83
  trilogy/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
85
- trilogy/dialect/base.py,sha256=0QVHv4F0t3_gRQrZ0woFoUNKu7vaXGo-BG1l47CZUKc,49698
84
+ trilogy/dialect/base.py,sha256=d2gXfa5Jh3uyN9H9MxG53JT-xQQgntq2X7EprobJYUc,49698
86
85
  trilogy/dialect/bigquery.py,sha256=XS3hpybeowgfrOrkycAigAF3NX2YUzTzfgE6f__2fT4,4316
87
86
  trilogy/dialect/common.py,sha256=_MarnMWRBn3VcNt3k5VUdFrwH6oHzGdNQquSpHNLq4o,5644
88
87
  trilogy/dialect/config.py,sha256=olnyeVU5W5T6b9-dMeNAnvxuPlyc2uefb7FRME094Ec,3834
@@ -105,8 +104,8 @@ trilogy/parsing/config.py,sha256=Z-DaefdKhPDmSXLgg5V4pebhSB0h590vI0_VtHnlukI,111
105
104
  trilogy/parsing/exceptions.py,sha256=Xwwsv2C9kSNv2q-HrrKC1f60JNHShXcCMzstTSEbiCw,154
106
105
  trilogy/parsing/helpers.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
107
106
  trilogy/parsing/parse_engine.py,sha256=2k1TvnBYE_CW5zCmNfVbf1aWBuMDm5Wz4QfKKgGnE5k,81824
108
- trilogy/parsing/render.py,sha256=tqB3GlGk3bX6AbkJjvADad2QH6n63nw1kgrpjzLX2tI,20520
109
- trilogy/parsing/trilogy.lark,sha256=rM4WleeyGhoRgU-FOGcaeHOzZcYVxN4f13e_3B4OeLQ,16389
107
+ trilogy/parsing/render.py,sha256=E8-R0zO40QoeTeVX9OYdi5e9YgRYtuRrezDRj7VOgds,20614
108
+ trilogy/parsing/trilogy.lark,sha256=2-jguxgJQnNLbODjTijqrXXzFZ_UlivTdiYhec2YWuc,16451
110
109
  trilogy/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
111
110
  trilogy/scripts/trilogy.py,sha256=1L0XrH4mVHRt1C9T1HnaDv2_kYEfbWTb5_-cBBke79w,3774
112
111
  trilogy/std/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -118,8 +117,8 @@ trilogy/std/money.preql,sha256=XWwvAV3WxBsHX9zfptoYRnBigcfYwrYtBHXTME0xJuQ,2082
118
117
  trilogy/std/net.preql,sha256=WZCuvH87_rZntZiuGJMmBDMVKkdhTtxeHOkrXNwJ1EE,416
119
118
  trilogy/std/ranking.preql,sha256=LDoZrYyz4g3xsII9XwXfmstZD-_92i1Eox1UqkBIfi8,83
120
119
  trilogy/std/report.preql,sha256=LbV-XlHdfw0jgnQ8pV7acG95xrd1-p65fVpiIc-S7W4,202
121
- pytrilogy-0.0.3.99.dist-info/METADATA,sha256=Symb6C1gs-2By5qBIt51posuNDI3_en-b84YtlWKTiU,11683
122
- pytrilogy-0.0.3.99.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
123
- pytrilogy-0.0.3.99.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
124
- pytrilogy-0.0.3.99.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
125
- pytrilogy-0.0.3.99.dist-info/RECORD,,
120
+ pytrilogy-0.0.3.101.dist-info/METADATA,sha256=dkvyYmeCXSZl2uHkPpoy-R7HdKb2w7pLGFrDu1tRGEU,11811
121
+ pytrilogy-0.0.3.101.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
122
+ pytrilogy-0.0.3.101.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
123
+ pytrilogy-0.0.3.101.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
124
+ pytrilogy-0.0.3.101.dist-info/RECORD,,
trilogy/__init__.py CHANGED
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
4
4
  from trilogy.executor import Executor
5
5
  from trilogy.parser import parse
6
6
 
7
- __version__ = "0.0.3.99"
7
+ __version__ = "0.0.3.101"
8
8
 
9
9
  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
trilogy/constants.py CHANGED
@@ -40,6 +40,7 @@ class Comments:
40
40
  joins: bool = True
41
41
  nullable: bool = True
42
42
  partial: bool = True
43
+ source_map: bool = False
43
44
 
44
45
 
45
46
  @dataclass
@@ -69,7 +69,7 @@ class DatasourceColumnBindingData:
69
69
  actual_modifiers: List[Modifier]
70
70
 
71
71
  def format_failure(self):
72
- return f"Concept {self.address} value '{self.value}' with type {self.value_modifiers} does not conform to expected type {str(self.actual_type)} with modifiers {self.actual_modifiers}"
72
+ return f"Concept {self.address} value '{self.value}' with type {self.value_type} and {self.value_modifiers} does not conform to expected type {str(self.actual_type)} with modifiers {self.actual_modifiers}"
73
73
 
74
74
  def is_modifier_issue(self) -> bool:
75
75
  return len(self.value_modifiers) > 0 and any(
trilogy/core/functions.py CHANGED
@@ -18,6 +18,7 @@ from trilogy.core.models.author import (
18
18
  AggregateWrapper,
19
19
  Concept,
20
20
  ConceptRef,
21
+ Conditional,
21
22
  Function,
22
23
  Parenthetical,
23
24
  UndefinedConcept,
@@ -129,8 +130,8 @@ def validate_case_output(
129
130
  def create_struct_output(
130
131
  args: list[Any],
131
132
  ) -> StructType:
132
- zipped = dict(zip(args[::2], args[1::2]))
133
- types = [arg_to_datatype(x) for x in args[1::2]]
133
+ zipped = dict(zip(args[1::2], args[::2]))
134
+ types = [arg_to_datatype(x) for x in args[::2]]
134
135
  return StructType(fields=types, fields_map=zipped)
135
136
 
136
137
 
@@ -997,6 +998,8 @@ def argument_to_purpose(arg) -> Purpose:
997
998
  return argument_to_purpose(arg.content)
998
999
  elif isinstance(arg, WindowItem):
999
1000
  return Purpose.PROPERTY
1001
+ elif isinstance(arg, Conditional):
1002
+ return Purpose.PROPERTY
1000
1003
  elif isinstance(arg, Concept):
1001
1004
  base = arg.purpose
1002
1005
  if (
@@ -3,6 +3,7 @@ from __future__ import annotations
3
3
  from abc import ABC
4
4
  from collections import UserDict, UserList
5
5
  from datetime import date, datetime
6
+ from decimal import Decimal
6
7
  from enum import Enum
7
8
  from typing import (
8
9
  Any,
@@ -448,6 +449,8 @@ def arg_to_datatype(arg) -> CONCRETE_TYPES:
448
449
  return DataType.STRING
449
450
  elif isinstance(arg, float):
450
451
  return DataType.FLOAT
452
+ elif isinstance(arg, Decimal):
453
+ return DataType.NUMERIC
451
454
  elif isinstance(arg, DataType):
452
455
  return arg
453
456
  elif isinstance(arg, NumericType):
@@ -118,19 +118,20 @@ class CTE(BaseModel):
118
118
  base += f" Source: {self.source.source_type}."
119
119
  if self.parent_ctes:
120
120
  base += f" References: {', '.join([x.name for x in self.parent_ctes])}."
121
- if self.joins:
121
+ if self.joins and CONFIG.comments.joins:
122
122
  base += f"\n-- Joins: {', '.join([str(x) for x in self.joins])}."
123
- if self.partial_concepts:
123
+ if self.partial_concepts and CONFIG.comments.partial:
124
124
  base += (
125
125
  f"\n-- Partials: {', '.join([str(x) for x in self.partial_concepts])}."
126
126
  )
127
- base += f"\n-- Source Map: {self.source_map}."
127
+ if CONFIG.comments.source_map:
128
+ base += f"\n-- Source Map: {self.source_map}."
128
129
  base += f"\n-- Output: {', '.join([str(x) for x in self.output_columns])}."
129
130
  if self.source.input_concepts:
130
131
  base += f"\n-- Inputs: {', '.join([str(x) for x in self.source.input_concepts])}."
131
132
  if self.hidden_concepts:
132
133
  base += f"\n-- Hidden: {', '.join([str(x) for x in self.hidden_concepts])}."
133
- if self.nullable_concepts:
134
+ if self.nullable_concepts and CONFIG.comments.nullable:
134
135
  base += (
135
136
  f"\n-- Nullable: {', '.join([str(x) for x in self.nullable_concepts])}."
136
137
  )
@@ -368,6 +369,7 @@ class CTE(BaseModel):
368
369
  @property
369
370
  def group_concepts(self) -> List[BuildConcept]:
370
371
  def check_is_not_in_group(c: BuildConcept):
372
+
371
373
  if len(self.source_map.get(c.address, [])) > 0:
372
374
  return False
373
375
  if c.derivation == Derivation.ROWSET:
@@ -381,8 +383,6 @@ class CTE(BaseModel):
381
383
  and c.lineage.operator in FunctionClass.AGGREGATE_FUNCTIONS.value
382
384
  ):
383
385
  return True
384
- if c.purpose == Purpose.METRIC:
385
- return True
386
386
 
387
387
  if c.derivation == Derivation.BASIC and c.lineage:
388
388
  if all([check_is_not_in_group(x) for x in c.lineage.concept_arguments]):
@@ -392,6 +392,10 @@ class CTE(BaseModel):
392
392
  and c.lineage.operator == FunctionType.GROUP
393
393
  ):
394
394
  return check_is_not_in_group(c.lineage.concept_arguments[0])
395
+ return False
396
+ if c.purpose == Purpose.METRIC:
397
+ return True
398
+
395
399
  return False
396
400
 
397
401
  return (
@@ -707,6 +711,8 @@ class QueryDatasource(BaseModel):
707
711
  f" {[c.address for c in self.output_concepts]} concepts and"
708
712
  f" {other.name} with {[c.address for c in other.output_concepts]} concepts"
709
713
  )
714
+ logger.info(self.source_map)
715
+ logger.info(other.source_map)
710
716
 
711
717
  merged_datasources: dict[str, Union[BuildDatasource, "QueryDatasource"]] = {}
712
718
 
@@ -770,6 +776,7 @@ class QueryDatasource(BaseModel):
770
776
  logger.debug(
771
777
  f"[Query Datasource] merged with {[c.address for c in qds.output_concepts]} concepts"
772
778
  )
779
+ logger.debug(qds.source_map)
773
780
  return qds
774
781
 
775
782
  @property
@@ -777,7 +784,7 @@ class QueryDatasource(BaseModel):
777
784
  filters = abs(hash(str(self.condition))) if self.condition else ""
778
785
  grain = "_".join([str(c).replace(".", "_") for c in self.grain.components])
779
786
  group = ""
780
- if self.source_type == SourceType.GROUP:
787
+ if self.group_required:
781
788
  keys = [
782
789
  x.address for x in self.output_concepts if x.purpose != Purpose.METRIC
783
790
  ]
@@ -19,13 +19,13 @@ from trilogy.core.processing.discovery_utility import (
19
19
  LOGGER_PREFIX,
20
20
  depth_to_prefix,
21
21
  get_priority_concept,
22
+ group_if_required,
22
23
  )
23
24
  from trilogy.core.processing.discovery_validation import (
24
25
  ValidationResult,
25
26
  validate_stack,
26
27
  )
27
28
  from trilogy.core.processing.nodes import (
28
- GroupNode,
29
29
  History,
30
30
  MergeNode,
31
31
  StrategyNode,
@@ -218,6 +218,7 @@ def initialize_loop_context(
218
218
  f"{depth_to_prefix(depth)}{LOGGER_PREFIX} derived condition row inputs {[x.address for x in required_filters]} present in mandatory list, forcing condition evaluation at this level. "
219
219
  )
220
220
  mandatory_list = completion_mandatory
221
+ all_mandatory = set(c.address for c in completion_mandatory)
221
222
  must_evaluate_condition_on_this_level_not_push_down = True
222
223
  else:
223
224
  logger.info(
@@ -263,7 +264,7 @@ def evaluate_loop_conditions(
263
264
  ]
264
265
  ) and not any(
265
266
  [
266
- x.derivation not in ROOT_DERIVATIONS
267
+ x.derivation not in ROOT_DERIVATIONS + [Derivation.BASIC]
267
268
  for x in context.mandatory_list
268
269
  if x.address not in context.conditions.row_arguments
269
270
  ]
@@ -282,7 +283,7 @@ def evaluate_loop_conditions(
282
283
  # to ensure filtering happens before something like a SUM
283
284
  if (
284
285
  context.conditions
285
- and priority_concept.derivation not in ROOT_DERIVATIONS
286
+ and priority_concept.derivation not in ROOT_DERIVATIONS + [Derivation.BASIC]
286
287
  and priority_concept.address not in context.conditions.row_arguments
287
288
  ):
288
289
  logger.info(
@@ -419,26 +420,9 @@ def generate_loop_completion(context: LoopContext, virtual: set[str]) -> Strateg
419
420
  logger.info(
420
421
  f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Conditions {context.conditions} were injected, checking if we need a group to restore grain"
421
422
  )
422
-
423
- result = GroupNode.check_if_required(
424
- downstream_concepts=output.usable_outputs,
425
- parents=[output.resolve()],
426
- environment=context.environment,
427
- depth=context.depth,
423
+ return group_if_required(
424
+ output, context.original_mandatory, context.environment
428
425
  )
429
- if result.required:
430
- logger.info(
431
- f"{depth_to_prefix(context.depth)}{LOGGER_PREFIX} Adding group node with outputs {[x.address for x in context.original_mandatory]}"
432
- )
433
- return GroupNode(
434
- output_concepts=context.original_mandatory,
435
- input_concepts=output.usable_outputs,
436
- environment=context.environment,
437
- parents=[output],
438
- partial_concepts=output.partial_concepts,
439
- preexisting_conditions=context.conditions.conditional,
440
- depth=context.depth,
441
- )
442
426
  return output
443
427
 
444
428
 
@@ -604,18 +588,4 @@ def source_query_concepts(
604
588
  logger.info(
605
589
  f"{depth_to_prefix(0)}{LOGGER_PREFIX} final concepts are {[x.address for x in final]}"
606
590
  )
607
- if GroupNode.check_if_required(
608
- downstream_concepts=final,
609
- parents=[root.resolve()],
610
- environment=environment,
611
- ).required:
612
- candidate: StrategyNode = GroupNode(
613
- output_concepts=final,
614
- input_concepts=final,
615
- environment=environment,
616
- parents=[root],
617
- partial_concepts=root.partial_concepts,
618
- )
619
- else:
620
- candidate = root
621
- return candidate
591
+ return group_if_required(root, output_concepts, environment)
@@ -6,6 +6,8 @@ from trilogy.core.models.build import (
6
6
  BuildConcept,
7
7
  BuildRowsetItem,
8
8
  )
9
+ from trilogy.core.models.build_environment import BuildEnvironment
10
+ from trilogy.core.processing.nodes import GroupNode, MergeNode, StrategyNode
9
11
 
10
12
 
11
13
  def depth_to_prefix(depth: int) -> str:
@@ -15,6 +17,34 @@ def depth_to_prefix(depth: int) -> str:
15
17
  LOGGER_PREFIX = "[DISCOVERY LOOP]"
16
18
 
17
19
 
20
+ def group_if_required(
21
+ root: StrategyNode, final: List[BuildConcept], environment: BuildEnvironment
22
+ ):
23
+ if isinstance(root, MergeNode) and root.force_group is True:
24
+ return root
25
+ elif isinstance(root, GroupNode):
26
+ return root
27
+ elif GroupNode.check_if_required(
28
+ downstream_concepts=final,
29
+ parents=[root.resolve()],
30
+ environment=environment,
31
+ ).required:
32
+ if isinstance(root, MergeNode):
33
+ root.force_group = True
34
+ root.set_output_concepts(final, rebuild=False)
35
+ root.rebuild_cache()
36
+ return root
37
+ return GroupNode(
38
+ output_concepts=final,
39
+ input_concepts=final,
40
+ environment=environment,
41
+ parents=[root],
42
+ partial_concepts=root.partial_concepts,
43
+ preexisting_conditions=root.preexisting_conditions,
44
+ )
45
+ return root
46
+
47
+
18
48
  def get_upstream_concepts(base: BuildConcept, nested: bool = False) -> set[str]:
19
49
  upstream = set()
20
50
  if nested:
@@ -120,8 +120,8 @@ def gen_basic_node(
120
120
  f"{depth_prefix}{LOGGER_PREFIX} No basic node could be generated for {concept}"
121
121
  )
122
122
  return None
123
-
124
- parent_node.source_type = SourceType.BASIC
123
+ if parent_node.source_type != SourceType.CONSTANT:
124
+ parent_node.source_type = SourceType.BASIC
125
125
  parent_node.add_output_concept(concept)
126
126
  for x in equivalent_optional:
127
127
  parent_node.add_output_concept(x)
@@ -129,24 +129,18 @@ def gen_basic_node(
129
129
  logger.info(
130
130
  f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: output {[x.address for x in parent_node.output_concepts]}"
131
131
  )
132
+ # if it's a constant, don't prune outputs
133
+ if parent_node.source_type == SourceType.CONSTANT:
134
+ return parent_node
132
135
  targets = [concept] + local_optional + equivalent_optional
133
- should_hide = [
134
- x
135
- for x in parent_node.output_concepts
136
- if (
137
- x.address not in targets
138
- and not any(x.address in y.pseudonyms for y in targets)
139
- )
140
- ]
141
- parent_node.hide_output_concepts(should_hide)
142
- should_not_hide = [
143
- x
144
- for x in parent_node.output_concepts
145
- if x.address in targets or any(x.address in y.pseudonyms for y in targets)
146
- ]
147
- parent_node.unhide_output_concepts(should_not_hide)
136
+ targets = [
137
+ s
138
+ for s in parent_node.output_concepts
139
+ if any(s.address in y.pseudonyms for y in targets)
140
+ ] + targets
141
+ parent_node.set_output_concepts(targets)
148
142
 
149
143
  logger.info(
150
- f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
144
+ f"{depth_prefix}{LOGGER_PREFIX} Returning basic select for {concept}: input: {[x.address for x in parent_node.input_concepts]} output {[x.address for x in parent_node.output_concepts]} hidden {[x for x in parent_node.hidden_concepts]}"
151
145
  )
152
146
  return parent_node
@@ -28,6 +28,7 @@ def get_aggregate_grain(
28
28
  parent_concepts: List[BuildConcept] = unique(
29
29
  resolve_function_parent_concepts(concept, environment=environment), "address"
30
30
  )
31
+
31
32
  if (
32
33
  concept.grain
33
34
  and len(concept.grain.components) > 0
@@ -108,13 +109,14 @@ def gen_group_node(
108
109
  f"{padding(depth)}{LOGGER_PREFIX} cannot include optional agg {possible_agg.address}; it has mismatched parent grain {comp_grain } vs local parent {build_grain_parents}"
109
110
  )
110
111
  if parent_concepts:
112
+ target_grain = BuildGrain.from_concepts(parent_concepts)
111
113
  logger.info(
112
- f"{padding(depth)}{LOGGER_PREFIX} fetching group node parents {LooseBuildConceptList(concepts=parent_concepts)}"
114
+ f"{padding(depth)}{LOGGER_PREFIX} fetching group node parents {LooseBuildConceptList(concepts=parent_concepts)} with expected grain {target_grain}"
113
115
  )
114
116
  parent_concepts = unique(
115
117
  [x for x in parent_concepts if not x.name == ALL_ROWS_CONCEPT], "address"
116
118
  )
117
- parent = source_concepts(
119
+ parent: StrategyNode | None = source_concepts(
118
120
  mandatory_list=parent_concepts,
119
121
  environment=environment,
120
122
  g=g,
@@ -20,6 +20,9 @@ from trilogy.core.models.build import (
20
20
  BuildWhereClause,
21
21
  )
22
22
  from trilogy.core.models.build_environment import BuildEnvironment
23
+ from trilogy.core.processing.discovery_utility import (
24
+ group_if_required,
25
+ )
23
26
  from trilogy.core.processing.nodes import History, MergeNode, StrategyNode
24
27
  from trilogy.core.processing.utility import padding
25
28
  from trilogy.utility import unique
@@ -161,9 +164,6 @@ def reinject_common_join_keys_v2(
161
164
  reduced = BuildGrain.from_concepts(concrete_concepts).components
162
165
  existing_addresses = set()
163
166
  for concrete in concrete_concepts:
164
- logger.debug(
165
- f"looking at column {concrete.address} with pseudonyms {concrete.pseudonyms}"
166
- )
167
167
  cnode = concept_to_node(concrete.with_default_grain())
168
168
  if cnode in final.nodes:
169
169
  existing_addresses.add(concrete.address)
@@ -510,6 +510,7 @@ def subgraphs_to_merge_node(
510
510
  search_conditions: BuildWhereClause | None = None,
511
511
  enable_early_exit: bool = True,
512
512
  ):
513
+ target_grain = BuildGrain.from_concepts(output_concepts, environment=environment)
513
514
  parents: List[StrategyNode] = []
514
515
  logger.info(
515
516
  f"{padding(depth)}{LOGGER_PREFIX} fetching subgraphs {[[c.address for c in subgraph] for subgraph in concept_subgraphs]}"
@@ -549,16 +550,20 @@ def subgraphs_to_merge_node(
549
550
  output_c.append(y)
550
551
 
551
552
  if len(parents) == 1 and enable_early_exit:
553
+
552
554
  logger.info(
553
555
  f"{padding(depth)}{LOGGER_PREFIX} only one parent node, exiting early w/ {[c.address for c in parents[0].output_concepts]}"
554
556
  )
555
- return parents[0]
557
+ parent = parents[0]
558
+ return group_if_required(parent, output_concepts, environment)
559
+
556
560
  rval = MergeNode(
557
561
  input_concepts=unique(input_c, "address"),
558
- output_concepts=output_c,
562
+ output_concepts=output_concepts,
559
563
  environment=environment,
560
564
  parents=parents,
561
565
  depth=depth,
566
+ grain=target_grain,
562
567
  # hidden_concepts=[]
563
568
  # conditions=conditions,
564
569
  # conditions=search_conditions.conditional,
@@ -579,6 +584,12 @@ def gen_merge_node(
579
584
  conditions: BuildConditional | None = None,
580
585
  search_conditions: BuildWhereClause | None = None,
581
586
  ) -> Optional[MergeNode]:
587
+
588
+ # we do not actually APPLY these conditions anywhere
589
+ # though we could look at doing that as an optimization
590
+ # it's important to include them so the base discovery loop that was generating
591
+ # the merge node can then add them automatically
592
+ # so we should not return a node with preexisting conditions
582
593
  if search_conditions:
583
594
  all_search_concepts = unique(
584
595
  all_concepts + list(search_conditions.row_arguments), "address"
@@ -33,6 +33,8 @@ def gen_unnest_node(
33
33
  logger.info(
34
34
  f"{depth_prefix}{LOGGER_PREFIX} unnest node for {concept} with lineage {concept.lineage} has parents + optional {all_parents} and equivalent optional {equivalent_optional}"
35
35
  )
36
+ local_conditions = False
37
+ expected_outputs = [concept] + local_optional
36
38
  if arguments or local_optional:
37
39
  parent = source_concepts(
38
40
  mandatory_list=all_parents,
@@ -47,24 +49,49 @@ def gen_unnest_node(
47
49
  f"{padding(depth)}{LOGGER_PREFIX} could not find unnest node parents"
48
50
  )
49
51
  return None
52
+ elif conditions:
53
+ logger.info(
54
+ f"{padding(depth)}{LOGGER_PREFIX} unnest node has no parents but conditions inputs {conditions.row_arguments} vs expected output {expected_outputs}"
55
+ )
56
+ if all([x.address in expected_outputs for x in conditions.row_arguments]):
57
+ local_conditions = True
58
+ else:
59
+ parent = source_concepts(
60
+ mandatory_list=conditions.conditional.row_arguments,
61
+ environment=environment,
62
+ g=g,
63
+ depth=depth + 1,
64
+ history=history,
65
+ conditions=conditions,
66
+ )
67
+ if not parent:
68
+ logger.info(
69
+ f"{padding(depth)}{LOGGER_PREFIX} could not find unnest node condition inputs with no parents"
70
+ )
71
+ return None
72
+ else:
73
+ parent = None
50
74
 
51
75
  base = UnnestNode(
52
76
  unnest_concepts=[concept] + equivalent_optional,
53
77
  input_concepts=arguments + non_equivalent_optional,
54
78
  output_concepts=[concept] + local_optional,
55
79
  environment=environment,
56
- parents=([parent] if (arguments or local_optional) else []),
80
+ parents=([parent] if parent else []),
57
81
  )
58
82
  # we need to sometimes nest an unnest node,
59
83
  # as unnest operations are not valid in all situations
60
84
  # TODO: inline this node when we can detect it's safe
85
+ conditional = conditions.conditional if conditions else None
61
86
  new = StrategyNode(
62
87
  input_concepts=base.output_concepts,
63
88
  output_concepts=base.output_concepts,
64
89
  environment=environment,
65
90
  parents=[base],
66
- # conditions=conditions.conditional if conditions else None,
67
- preexisting_conditions=conditions.conditional if conditions else None,
91
+ conditions=conditional if local_conditions is True else None,
92
+ preexisting_conditions=(
93
+ conditional if conditional and local_conditions is False else None
94
+ ),
68
95
  )
69
96
  qds = new.resolve()
70
97
  assert qds.source_map[concept.address] == {base.resolve()}
@@ -113,16 +113,18 @@ class GroupNode(StrategyNode):
113
113
  environment.concepts[c] for c in (comp_grain - target_grain).components
114
114
  ]
115
115
  logger.info(
116
- f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain}, difference {difference}"
116
+ f"{padding}{LOGGER_PREFIX} Group requirement check: {comp_grain}, {target_grain}, difference {[x.address for x in difference]}"
117
117
  )
118
118
 
119
119
  # if the difference is all unique properties whose keys are in the source grain
120
120
  # we can also suppress the group
121
121
  if all(
122
122
  [
123
- x.purpose == Purpose.UNIQUE_PROPERTY
124
- and x.keys
125
- and all(z in comp_grain.components for z in x.keys)
123
+ x.keys
124
+ and all(
125
+ environment.concepts[z].address in comp_grain.components
126
+ for z in x.keys
127
+ )
126
128
  for x in difference
127
129
  ]
128
130
  ):
@@ -308,7 +308,13 @@ class MergeNode(StrategyNode):
308
308
  f"{self.logging_prefix}{LOGGER_PREFIX} skipping existence only source with {source.output_concepts} from grain accumulation"
309
309
  )
310
310
  continue
311
+ logger.info(
312
+ f"{self.logging_prefix}{LOGGER_PREFIX} adding source grain {source.grain} from source {source.identifier} to pregrain"
313
+ )
311
314
  pregrain += source.grain
315
+ logger.info(
316
+ f"{self.logging_prefix}{LOGGER_PREFIX} pregrain is now {pregrain}"
317
+ )
312
318
 
313
319
  pregrain = BuildGrain.from_concepts(
314
320
  pregrain.components, environment=self.environment
@@ -334,7 +340,9 @@ class MergeNode(StrategyNode):
334
340
  if isinstance(join, BaseJoin) and join.join_type == JoinType.FULL:
335
341
  full_join_concepts += join.input_concepts
336
342
 
337
- if self.whole_grain:
343
+ if self.force_group is True:
344
+ force_group = True
345
+ elif self.whole_grain:
338
346
  force_group = False
339
347
  elif self.force_group is False:
340
348
  force_group = False
@@ -221,6 +221,7 @@ class SelectNode(StrategyNode):
221
221
 
222
222
 
223
223
  class ConstantNode(SelectNode):
224
+ source_type = SourceType.CONSTANT
224
225
  """Represents a constant value."""
225
226
 
226
227
  def copy(self) -> "ConstantNode":
@@ -36,31 +36,51 @@ def type_check(
36
36
  ) -> bool:
37
37
  if input is None and nullable:
38
38
  return True
39
+
39
40
  target_type = expected_type
40
41
  while isinstance(target_type, TraitDataType):
41
42
  return type_check(input, target_type.data_type, nullable)
43
+
42
44
  if target_type == DataType.STRING:
43
45
  return isinstance(input, str)
44
46
  if target_type == DataType.INTEGER:
45
47
  return isinstance(input, int)
48
+ if target_type == DataType.BIGINT:
49
+ return isinstance(input, int) # or check for larger int if needed
46
50
  if target_type == DataType.FLOAT or isinstance(target_type, NumericType):
47
51
  return (
48
52
  isinstance(input, float)
49
53
  or isinstance(input, int)
50
54
  or isinstance(input, Decimal)
51
55
  )
56
+ if target_type == DataType.NUMBER:
57
+ return isinstance(input, (int, float, Decimal))
58
+ if target_type == DataType.NUMERIC:
59
+ return isinstance(input, (int, float, Decimal))
52
60
  if target_type == DataType.BOOL:
53
61
  return isinstance(input, bool)
54
62
  if target_type == DataType.DATE:
55
- return isinstance(input, date)
63
+ return isinstance(input, date) and not isinstance(input, datetime)
56
64
  if target_type == DataType.DATETIME:
57
65
  return isinstance(input, datetime)
66
+ if target_type == DataType.TIMESTAMP:
67
+ return isinstance(input, datetime) # or timestamp type if you have one
68
+ if target_type == DataType.UNIX_SECONDS:
69
+ return isinstance(input, (int, float)) # Unix timestamps are numeric
70
+ if target_type == DataType.DATE_PART:
71
+ return isinstance(
72
+ input, str
73
+ ) # assuming date parts are strings like "year", "month"
58
74
  if target_type == DataType.ARRAY or isinstance(target_type, ArrayType):
59
75
  return isinstance(input, list)
60
76
  if target_type == DataType.MAP or isinstance(target_type, MapType):
61
77
  return isinstance(input, dict)
62
78
  if target_type == DataType.STRUCT or isinstance(target_type, StructType):
63
79
  return isinstance(input, dict)
80
+ if target_type == DataType.NULL:
81
+ return input is None
82
+ if target_type == DataType.UNKNOWN:
83
+ return True
64
84
  return False
65
85
 
66
86
 
@@ -125,15 +145,19 @@ def validate_datasource(
125
145
  rval = row[actual_address]
126
146
  passed = type_check(rval, col.concept.datatype, col.is_nullable)
127
147
  if not passed:
148
+ value_type = (
149
+ arg_to_datatype(rval) if rval is not None else col.concept.datatype
150
+ )
151
+ traits = None
152
+ if isinstance(col.concept.datatype, TraitDataType):
153
+ traits = col.concept.datatype.traits
154
+ if traits and not isinstance(value_type, TraitDataType):
155
+ value_type = TraitDataType(type=value_type, traits=traits)
128
156
  failures.append(
129
157
  DatasourceColumnBindingData(
130
158
  address=col.concept.address,
131
159
  value=rval,
132
- value_type=(
133
- arg_to_datatype(rval)
134
- if rval is not None
135
- else col.concept.datatype
136
- ),
160
+ value_type=value_type,
137
161
  value_modifiers=[Modifier.NULLABLE] if rval is None else [],
138
162
  actual_type=col.concept.datatype,
139
163
  actual_modifiers=col.concept.modifiers,
trilogy/dialect/base.py CHANGED
@@ -163,7 +163,7 @@ def render_case(args):
163
163
 
164
164
 
165
165
  def struct_arg(args):
166
- return [f"{x[0]}: {x[1]}" for x in zip(args[::2], args[1::2])]
166
+ return [f"{x[1]}: {x[0]}" for x in zip(args[::2], args[1::2])]
167
167
 
168
168
 
169
169
  FUNCTION_MAP = {
trilogy/parsing/render.py CHANGED
@@ -349,7 +349,8 @@ class Renderer:
349
349
  else:
350
350
  output = f"{concept.purpose.value} {namespace}{concept.name} <- {self.to_string(concept.lineage)};"
351
351
  if base_description:
352
- output += f" #{base_description}"
352
+ lines = "\n#".join(base_description.split("\n"))
353
+ output += f" #{lines}"
353
354
  return output
354
355
 
355
356
  @to_string.register
@@ -439,7 +440,7 @@ class Renderer:
439
440
 
440
441
  @to_string.register
441
442
  def _(self, arg: "Conditional"):
442
- return f"({self.to_string(arg.left)} {arg.operator.value} {self.to_string(arg.right)})"
443
+ return f"{self.to_string(arg.left)} {arg.operator.value} {self.to_string(arg.right)}"
443
444
 
444
445
  @to_string.register
445
446
  def _(self, arg: "SubselectComparison"):
@@ -451,7 +452,8 @@ class Renderer:
451
452
 
452
453
  @to_string.register
453
454
  def _(self, arg: "Comment"):
454
- return f"{arg.text}"
455
+ lines = "\n#".join(arg.text.split("\n"))
456
+ return f"{lines}"
455
457
 
456
458
  @to_string.register
457
459
  def _(self, arg: "WindowItem"):
@@ -134,10 +134,12 @@
134
134
  metadata: "metadata" "(" IDENTIFIER "=" string_lit ")"
135
135
 
136
136
  limit: "LIMIT"i /[0-9]+/
137
+
138
+ _order_atom: expr ordering
137
139
 
138
- order_list: expr ordering ("," expr ordering)* ","?
140
+ order_list: _order_atom ("," _order_atom)* ","?
139
141
 
140
- over_list: concept_lit ("," concept_lit )* ","?
142
+ over_list: (concept_lit ",")* concept_lit ","?
141
143
 
142
144
  ORDERING_DIRECTION: /ASC|DESC/i
143
145
 
@@ -433,7 +435,8 @@
433
435
  map_lit: "{" (literal ":" literal ",")* literal ":" literal ","? "}"
434
436
 
435
437
  _STRUCT.1: "struct("i
436
- struct_lit: _STRUCT (IDENTIFIER "->" expr ",")* IDENTIFIER "->" expr ","? ")"
438
+ _BINDING.1: "->"
439
+ struct_lit: _STRUCT expr _BINDING IDENTIFIER ( "," expr _BINDING IDENTIFIER )* ","? ")"
437
440
 
438
441
  !bool_lit: "True"i | "False"i
439
442
 
File without changes