pytrilogy-0.0.2.17-py3-none-any.whl → pytrilogy-0.0.2.18-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (40)
  1. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/METADATA +12 -8
  2. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/RECORD +40 -39
  3. trilogy/__init__.py +1 -1
  4. trilogy/constants.py +1 -1
  5. trilogy/core/enums.py +1 -0
  6. trilogy/core/functions.py +11 -0
  7. trilogy/core/models.py +89 -47
  8. trilogy/core/optimization.py +15 -9
  9. trilogy/core/processing/concept_strategies_v3.py +372 -145
  10. trilogy/core/processing/node_generators/basic_node.py +27 -55
  11. trilogy/core/processing/node_generators/common.py +6 -7
  12. trilogy/core/processing/node_generators/filter_node.py +28 -31
  13. trilogy/core/processing/node_generators/group_node.py +14 -2
  14. trilogy/core/processing/node_generators/group_to_node.py +3 -1
  15. trilogy/core/processing/node_generators/multiselect_node.py +3 -0
  16. trilogy/core/processing/node_generators/node_merge_node.py +14 -9
  17. trilogy/core/processing/node_generators/rowset_node.py +12 -12
  18. trilogy/core/processing/node_generators/select_merge_node.py +302 -0
  19. trilogy/core/processing/node_generators/select_node.py +7 -511
  20. trilogy/core/processing/node_generators/unnest_node.py +4 -3
  21. trilogy/core/processing/node_generators/window_node.py +12 -37
  22. trilogy/core/processing/nodes/__init__.py +0 -2
  23. trilogy/core/processing/nodes/base_node.py +69 -20
  24. trilogy/core/processing/nodes/filter_node.py +3 -0
  25. trilogy/core/processing/nodes/group_node.py +18 -17
  26. trilogy/core/processing/nodes/merge_node.py +4 -10
  27. trilogy/core/processing/nodes/select_node_v2.py +28 -14
  28. trilogy/core/processing/nodes/window_node.py +1 -2
  29. trilogy/core/processing/utility.py +51 -3
  30. trilogy/core/query_processor.py +17 -73
  31. trilogy/dialect/base.py +7 -3
  32. trilogy/dialect/duckdb.py +4 -1
  33. trilogy/dialect/sql_server.py +3 -3
  34. trilogy/hooks/query_debugger.py +5 -3
  35. trilogy/parsing/parse_engine.py +66 -38
  36. trilogy/parsing/trilogy.lark +2 -1
  37. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/LICENSE.md +0 -0
  38. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/WHEEL +0 -0
  39. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/entry_points.txt +0 -0
  40. {pytrilogy-0.0.2.17.dist-info → pytrilogy-0.0.2.18.dist-info}/top_level.txt +0 -0
@@ -1,424 +1,31 @@
-from typing import List, Optional, Callable
-
 from trilogy.core.enums import PurposeLineage
 from trilogy.core.models import (
     Concept,
     Environment,
-    Grain,
     LooseConceptList,
-    Datasource,
     WhereClause,
 )
 from trilogy.core.processing.nodes import (
     StrategyNode,
-    SelectNode,
-    MergeNode,
-    GroupNode,
-    ConstantNode,
 )
 from trilogy.core.exceptions import NoDatasourceException
-import networkx as nx
-from trilogy.core.graph_models import concept_to_node, datasource_to_node
 from trilogy.constants import logger
 from trilogy.core.processing.utility import padding
-from dataclasses import dataclass
+from trilogy.core.processing.node_generators.select_merge_node import (
+    gen_select_merge_node,
+)

 LOGGER_PREFIX = "[GEN_SELECT_NODE]"


-@dataclass
-class DatasourceMatch:
-    key: str
-    datasource: Datasource
-    matched: LooseConceptList
-    partial: LooseConceptList
-    nullable: LooseConceptList
-
-    def __repr__(self):
-        return f"DatasourceMatch({self.key}, {self.datasource.identifier}, {str(self.matched)}, {str(self.partial)})"
-
-
-def dm_to_strategy_node(
-    dm: DatasourceMatch,
-    target_grain: Grain,
-    environment: Environment,
-    g: nx.DiGraph,
-    depth: int,
-    source_concepts: Callable,
-    accept_partial: bool = False,
-) -> StrategyNode:
-    datasource = dm.datasource
-
-    if target_grain and target_grain.issubset(datasource.grain):
-        if all([x in dm.matched for x in target_grain.components]):
-            force_group = False
-        # if we are not returning the grain
-        # we have to group
-        else:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} not all grain components {target_grain} are in output {str(dm.matched)}, group to actual grain"
-            )
-            force_group = True
-    elif all([x in dm.matched for x in datasource.grain.components]):
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} query output includes all grain components, no reason to group further"
-        )
-        force_group = False
-    else:
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} target grain is not subset of datasource grain {datasource.grain}, required to group"
-        )
-        force_group = True
-    bcandidate: StrategyNode = SelectNode(
-        input_concepts=[c.concept for c in datasource.columns],
-        output_concepts=dm.matched.concepts,
-        environment=environment,
-        g=g,
-        parents=[],
-        depth=depth,
-        partial_concepts=dm.partial.concepts,
-        nullable_concepts=dm.nullable.concepts,
-        accept_partial=accept_partial,
-        datasource=datasource,
-        grain=datasource.grain,
-        conditions=datasource.where.conditional if datasource.where else None,
-    )
-    # we need to nest the group node one further
-    if force_group is True:
-        candidate: StrategyNode = GroupNode(
-            output_concepts=dm.matched.concepts,
-            input_concepts=dm.matched.concepts,
-            environment=environment,
-            g=g,
-            parents=[bcandidate],
-            depth=depth,
-            partial_concepts=bcandidate.partial_concepts,
-        )
-    else:
-        candidate = bcandidate
-    return candidate
-
-
-def gen_select_nodes_from_tables_v2(
-    mandatory_concept: Concept,
-    all_concepts: List[Concept],
-    g: nx.DiGraph,
-    environment: Environment,
-    depth: int,
-    target_grain: Grain,
-    source_concepts: Callable,
-    accept_partial: bool = False,
-    conditions: WhereClause | None = None,
-) -> tuple[bool, list[Concept], list[StrategyNode]]:
-    # if we have only constants
-    # we don't need a table
-    # so verify nothing, select node will render
-    all_lcl = LooseConceptList(concepts=all_concepts)
-    if all([c.derivation == PurposeLineage.CONSTANT for c in all_lcl]):
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} All concepts {[x.address for x in all_lcl]} are constants, returning constant node"
-        )
-        return (
-            True,
-            all_lcl.concepts,
-            [
-                ConstantNode(
-                    output_concepts=all_lcl.concepts,
-                    input_concepts=[],
-                    environment=environment,
-                    g=g,
-                    parents=[],
-                    depth=depth,
-                    # no partial for constants
-                    partial_concepts=[],
-                    force_group=False,
-                )
-            ],
-        )
-    # otherwise, we need to look for a table
-    nodes_to_find = [concept_to_node(x.with_default_grain()) for x in all_lcl.concepts]
-    matches: dict[str, DatasourceMatch] = {}
-    for k, datasource in environment.datasources.items():
-        matched = []
-        matched_paths = []
-        for idx, req_concept in enumerate(nodes_to_find):
-            try:
-                path = nx.shortest_path(
-                    g,
-                    source=datasource_to_node(datasource),
-                    target=req_concept,
-                )
-                ds_valid = (
-                    sum(
-                        [
-                            1 if g.nodes[node]["type"] == "datasource" else 0
-                            for node in path
-                        ]
-                    )
-                    == 1
-                )
-                address_valid = (
-                    sum(
-                        [
-                            (
-                                1
-                                if g.nodes[node]["type"] == "concept"
-                                and g.nodes[node]["concept"].address
-                                != all_lcl.concepts[idx].address
-                                else 0
-                            )
-                            for node in path
-                        ]
-                    )
-                    == 0
-                )
-                if ds_valid and address_valid:
-                    matched_paths.append(path)
-                    matched.append(all_lcl.concepts[idx])
-
-            except nx.NodeNotFound:
-                continue
-            except nx.exception.NetworkXNoPath:
-                continue
-        dm = DatasourceMatch(
-            key=k,
-            datasource=datasource,
-            matched=LooseConceptList(concepts=matched),
-            partial=LooseConceptList(
-                concepts=[
-                    c.concept
-                    for c in datasource.columns
-                    if not c.is_complete and c.concept.address in all_lcl
-                ]
-            ),
-            nullable=LooseConceptList(
-                concepts=[
-                    c.concept
-                    for c in datasource.columns
-                    if c.is_nullable and c.concept in all_lcl
-                ]
-            ),
-        )
-        if not matched:
-            continue
-        if mandatory_concept.address not in dm.matched:
-            continue
-        if not accept_partial and dm.partial.addresses:
-            continue
-        matches[k] = dm
-    found: set[str] = set()
-    all_found = False
-    all_checked = False
-    to_return: list[StrategyNode] = []
-    if not matches:
-        return False, [], []
-    while not all_found and not all_checked:
-        final_key: str = max(
-            matches,
-            key=lambda x: len(
-                [m for m in matches[x].matched.addresses if m not in found]
-            )
-            - 0.1 * len(matches[x].partial.addresses),
-        )
-        final: DatasourceMatch = matches[final_key]
-
-        candidate = dm_to_strategy_node(
-            final,
-            target_grain=Grain(
-                components=[
-                    x for x in target_grain.components if x.address in final.matched
-                ]
-            ),
-            environment=environment,
-            g=g,
-            depth=depth,
-            accept_partial=accept_partial,
-            source_concepts=source_concepts,
-        )
-        to_return.append(candidate)
-        del matches[final_key]
-        found = found.union(final.matched.addresses)
-        all_found = all_lcl.addresses.issubset(found)
-        all_checked = len(matches) == 0
-    return all_found, [x for x in all_concepts if x.address in found], to_return
-
-
-def gen_select_node_from_table(
-    target_concept: Concept,
-    all_concepts: List[Concept],
-    g: nx.DiGraph,
-    environment: Environment,
-    depth: int,
-    target_grain: Grain,
-    source_concepts,
-    accept_partial: bool = False,
-    conditions: WhereClause | None = None,
-) -> Optional[StrategyNode]:
-    # if we have only constants
-    # we don't need a table
-    # so verify nothing, select node will render
-    all_lcl = LooseConceptList(concepts=all_concepts)
-    if all([c.derivation == PurposeLineage.CONSTANT for c in all_concepts]):
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} All concepts {[x.address for x in all_concepts]} are constants, returning constant node"
-        )
-        return ConstantNode(
-            output_concepts=all_concepts,
-            input_concepts=[],
-            environment=environment,
-            g=g,
-            parents=[],
-            depth=depth,
-            # no partial for constants
-            partial_concepts=[],
-            force_group=False,
-        )
-    candidates: dict[str, StrategyNode] = {}
-    scores: dict[str, int] = {}
-    # otherwise, we need to look for a table
-    nodes_to_find = [concept_to_node(x.with_default_grain()) for x in all_concepts]
-    for datasource in environment.datasources.values():
-        all_found = True
-        for idx, req_concept in enumerate(nodes_to_find):
-            try:
-                path = nx.shortest_path(
-                    g,
-                    source=datasource_to_node(datasource),
-                    target=req_concept,
-                )
-            except nx.NodeNotFound as e:
-                # just to provide better error
-                ncandidates = [
-                    datasource_to_node(datasource),
-                    req_concept,
-                ]
-                for ncandidate in ncandidates:
-                    try:
-                        g.nodes[ncandidate]
-                    except KeyError:
-                        raise nx.exception.NetworkXNoPath
-                raise e
-            except nx.exception.NetworkXNoPath:
-                all_found = False
-                break
-            # 2023-10-18 - more strict condition then below
-            # 2023-10-20 - we need this to get through abstract concepts
-            # but we may want to add a filter to avoid using this for anything with lineage
-            # if len(path) != 2:
-            # all_found = False
-            # break
-            if len([p for p in path if g.nodes[p]["type"] == "datasource"]) != 1:
-                all_found = False
-                break
-            for node in path:
-                if g.nodes[node]["type"] == "datasource":
-                    continue
-                if g.nodes[node]["concept"].address == all_concepts[idx].address:
-                    continue
-                all_found = False
-                break
-
-        if not all_found:
-            # skip to next node
-            continue
-        partial_concepts = [
-            c.concept
-            for c in datasource.columns
-            if not c.is_complete and c.concept in all_lcl
-        ]
-        partial_lcl = LooseConceptList(concepts=partial_concepts)
-        nullable_concepts = [
-            c.concept
-            for c in datasource.columns
-            if c.is_nullable and c.concept in all_lcl
-        ]
-        nullable_lcl = LooseConceptList(concepts=nullable_concepts)
-        if not accept_partial and target_concept in partial_lcl:
-            continue
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} target grain is {str(target_grain)}"
-        )
-        if target_grain and target_grain.issubset(datasource.grain):
-
-            if (
-                all([x in all_lcl for x in target_grain.components])
-                and target_grain == datasource.grain
-            ):
-                logger.info(
-                    f"{padding(depth)}{LOGGER_PREFIX} target grain components match all lcl, group to false"
-                )
-                force_group = False
-            # if we are not returning the grain
-            # we have to group
-            else:
-                logger.info(
-                    f"{padding(depth)}{LOGGER_PREFIX} not all grain components {target_grain} are in output {str(all_lcl)}, group to actual grain"
-                )
-                force_group = True
-        elif all([x in all_lcl for x in datasource.grain.components]):
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} query output includes all grain components, no reason to group further"
-            )
-            force_group = False
-        else:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} target grain is not subset of datasource grain {datasource.grain}, required to group"
-            )
-            force_group = True
-
-        bcandidate: StrategyNode = SelectNode(
-            input_concepts=[c.concept for c in datasource.columns],
-            output_concepts=all_concepts,
-            environment=environment,
-            g=g,
-            parents=[],
-            depth=depth,
-            partial_concepts=[c for c in all_concepts if c in partial_lcl],
-            nullable_concepts=[c for c in all_concepts if c in nullable_lcl],
-            accept_partial=accept_partial,
-            datasource=datasource,
-            grain=Grain(components=all_concepts),
-            conditions=datasource.where.conditional if datasource.where else None,
-        )
-        # if conditions:
-        # for component in conditions.components:
-        # if
-        # we need to nest the group node one further
-        if force_group is True:
-            candidate: StrategyNode = GroupNode(
-                output_concepts=all_concepts,
-                input_concepts=all_concepts,
-                environment=environment,
-                g=g,
-                parents=[bcandidate],
-                depth=depth,
-                partial_concepts=bcandidate.partial_concepts,
-                nullable_concepts=bcandidate.nullable_concepts,
-            )
-        else:
-            candidate = bcandidate
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} found select node with {datasource.identifier}, force group is {force_group}, returning {candidate.output_lcl}"
-        )
-        candidates[datasource.identifier] = candidate
-        scores[datasource.identifier] = -len(partial_concepts)
-    if not candidates:
-        return None
-    final = max(candidates, key=lambda x: scores[x])
-    return candidates[final]
-
-
 def gen_select_node(
     concept: Concept,
-    local_optional: List[Concept],
+    local_optional: list[Concept],
     environment: Environment,
     g,
     depth: int,
-    source_concepts,
     accept_partial: bool = False,
     fail_if_not_found: bool = True,
-    accept_partial_optional: bool = True,
-    target_grain: Grain | None = None,
     conditions: WhereClause | None = None,
 ) -> StrategyNode | None:
     all_concepts = [concept] + local_optional
@@ -427,135 +34,24 @@ def gen_select_node(
         concepts=[
             x
             for x in all_concepts
-            if x.address in [z.address for z in environment.materialized_concepts]
+            if x.address in [z for z in environment.materialized_concepts]
             or x.derivation == PurposeLineage.CONSTANT
         ]
     )
-    if not target_grain:
-        target_grain = Grain()
-        for ac in all_concepts:
-            target_grain += ac.grain
-        if target_grain.abstract:
-            target_grain = Grain(components=all_concepts)
     if materialized_lcl != all_lcl:
         logger.info(
             f"{padding(depth)}{LOGGER_PREFIX} Skipping select node generation for {concept.address}"
-            f" as it + optional (looking for all {all_lcl}) includes non-materialized concepts"
+            f" as it + optional includes non-materialized concepts (looking for all {all_lcl}) "
         )
         if fail_if_not_found:
             raise NoDatasourceException(f"No datasource exists for {concept}")
         return None

-    ds: StrategyNode | None = None
-
-    # attempt to select all concepts from table
-    ds = gen_select_node_from_table(
-        concept,
+    return gen_select_merge_node(
         [concept] + local_optional,
         g=g,
         environment=environment,
         depth=depth,
         accept_partial=accept_partial,
-        target_grain=target_grain,
-        source_concepts=source_concepts,
         conditions=conditions,
     )
-    if ds:
-        logger.info(
-            f"{padding(depth)}{LOGGER_PREFIX} Found select node with all target concepts, force group is {ds.force_group}, target grain {target_grain}"
-        )
-        return ds
-    # if we cannot find a match
-    all_found, found, parents = gen_select_nodes_from_tables_v2(
-        concept,
-        all_concepts,
-        g,
-        environment,
-        depth=depth,
-        target_grain=target_grain,
-        accept_partial=accept_partial,
-        source_concepts=source_concepts,
-        conditions=conditions,
-    )
-    if parents and (all_found or accept_partial_optional):
-        all_partial = [
-            c
-            for c in all_concepts
-            if all(
-                [c.address in [x.address for x in p.partial_concepts] for p in parents]
-            )
-        ]
-
-        all_nullable = [
-            c
-            for c in all_concepts
-            if any(
-                [c.address in [x.address for x in p.nullable_concepts] for p in parents]
-            )
-        ]
-
-        if all_found:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} found all optional {[c.address for c in local_optional]} via joins"
-            )
-        else:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} found some optional {[x.address for x in found]}, and partial return allowed: returning"
-            )
-
-        force_group = None
-        inferred_grain = sum([x.grain for x in parents if x.grain], Grain())
-        for candidate in parents:
-            if candidate.grain and not candidate.grain.issubset(target_grain):
-                force_group = True
-        if len(parents) == 1:
-            candidate = parents[0]
-        else:
-            candidate = MergeNode(
-                output_concepts=[concept] + found,
-                input_concepts=[concept] + found,
-                environment=environment,
-                g=g,
-                parents=parents,
-                depth=depth,
-                partial_concepts=all_partial,
-                nullable_concepts=all_nullable,
-                grain=inferred_grain,
-            )
-
-        candidate.depth += 1
-        if force_group:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} datasource grain {inferred_grain} does not match target grain {target_grain} for select, adding group node"
-            )
-            return GroupNode(
-                output_concepts=candidate.output_concepts,
-                input_concepts=candidate.output_concepts,
-                environment=environment,
-                g=g,
-                parents=[candidate],
-                depth=depth,
-                partial_concepts=candidate.partial_concepts,
-            )
-        else:
-            logger.info(
-                f"{padding(depth)}{LOGGER_PREFIX} datasource grain {inferred_grain} matches target grain {target_grain} for select, returning without group"
-            )
-            return candidate
-
-    if not accept_partial_optional:
-        return None
-    ds = gen_select_node_from_table(
-        concept,
-        [concept],
-        g=g,
-        environment=environment,
-        depth=depth,
-        accept_partial=accept_partial,
-        target_grain=Grain(components=[concept]),
-        source_concepts=source_concepts,
-    )
-
-    if not ds and fail_if_not_found:
-        raise NoDatasourceException(f"No datasource exists for {concept}")
-    return ds
@@ -6,7 +6,7 @@ from trilogy.core.processing.nodes import UnnestNode, History, StrategyNode
 from trilogy.core.processing.utility import padding
 from trilogy.constants import logger

-LOGGER_PREFIX = "[GEN_ROWSET_NODE]"
+LOGGER_PREFIX = "[GEN_UNNEST_NODE]"


 def gen_unnest_node(
@@ -54,11 +54,12 @@ def gen_unnest_node(
     # as unnest operations are not valid in all situations
     # TODO: inline this node when we can detect it's safe
     new = StrategyNode(
-        input_concepts=[concept] + local_optional,
-        output_concepts=[concept] + local_optional,
+        input_concepts=base.output_concepts,
+        output_concepts=base.output_concepts,
         environment=environment,
         g=g,
         parents=[base],
+        preexisting_conditions=conditions.conditional if conditions else None,
     )
     qds = new.resolve()
     assert qds.source_map[concept.address] == {base.resolve()}
@@ -3,17 +3,9 @@ from typing import List

 from trilogy.core.models import Concept, WindowItem, Environment, WhereClause
 from trilogy.utility import unique
-from trilogy.core.processing.nodes import (
-    WindowNode,
-)
-from trilogy.core.processing.nodes import MergeNode, History
-
+from trilogy.core.processing.nodes import WindowNode, StrategyNode, History
 from trilogy.constants import logger
-from trilogy.core.processing.utility import padding, create_log_lambda
-from trilogy.core.processing.node_generators.common import (
-    gen_enrichment_node,
-)
-from trilogy.core.processing.utility import concept_to_relevant_joins
+from trilogy.core.processing.utility import padding

 LOGGER_PREFIX = "[GEN_WINDOW_NODE]"

@@ -39,10 +31,10 @@ def gen_window_node(
     source_concepts,
     history: History | None = None,
     conditions: WhereClause | None = None,
-) -> WindowNode | MergeNode | None:
+) -> StrategyNode | None:
     parent_concepts = resolve_window_parent_concepts(concept)
     parent_node = source_concepts(
-        mandatory_list=parent_concepts,
+        mandatory_list=parent_concepts + local_optional,
         environment=environment,
         g=g,
         depth=depth + 1,
@@ -69,8 +61,8 @@ def gen_window_node(
         )
         raise SyntaxError
     _window_node = WindowNode(
-        input_concepts=parent_concepts,
-        output_concepts=[concept] + parent_concepts,
+        input_concepts=parent_concepts + local_optional,
+        output_concepts=[concept] + parent_concepts + local_optional,
         environment=environment,
         g=g,
         parents=[
@@ -80,29 +72,12 @@ def gen_window_node(
     )
     _window_node.rebuild_cache()
     _window_node.resolve()
-    window_node = MergeNode(
-        parents=[_window_node],
-        environment=environment,
-        g=g,
-        input_concepts=[concept] + _window_node.input_concepts,
-        output_concepts=_window_node.output_concepts,
-        grain=_window_node.grain,
-        force_group=False,
-        depth=depth,
-    )
-    window_node.resolve()
-    if not local_optional:
-        return window_node
-    logger.info(f"{padding(depth)}{LOGGER_PREFIX} window node requires enrichment")
-    return gen_enrichment_node(
-        window_node,
-        join_keys=concept_to_relevant_joins(parent_concepts),
-        local_optional=local_optional,
+    window_node = StrategyNode(
+        input_concepts=[concept] + local_optional,
+        output_concepts=[concept] + local_optional,
         environment=environment,
         g=g,
-        depth=depth,
-        source_concepts=source_concepts,
-        log_lambda=create_log_lambda(LOGGER_PREFIX, depth, logger),
-        history=history,
-        conditions=conditions,
+        parents=[_window_node],
+        preexisting_conditions=conditions.conditional if conditions else None,
     )
+    return window_node
@@ -144,8 +144,6 @@ class History(BaseModel):
             depth + 1,
             fail_if_not_found=fail_if_not_found,
             accept_partial=accept_partial,
-            accept_partial_optional=accept_partial_optional,
-            source_concepts=source_concepts,
             conditions=conditions,
         )
         self.select_history[fingerprint] = gen