relationalai 0.11.3__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. relationalai/clients/config.py +7 -0
  2. relationalai/clients/direct_access_client.py +113 -0
  3. relationalai/clients/snowflake.py +41 -107
  4. relationalai/clients/use_index_poller.py +349 -188
  5. relationalai/early_access/dsl/bindings/csv.py +2 -2
  6. relationalai/early_access/metamodel/rewrite/__init__.py +5 -3
  7. relationalai/early_access/rel/rewrite/__init__.py +1 -1
  8. relationalai/errors.py +24 -3
  9. relationalai/semantics/internal/annotations.py +1 -0
  10. relationalai/semantics/internal/internal.py +22 -4
  11. relationalai/semantics/lqp/builtins.py +1 -0
  12. relationalai/semantics/lqp/executor.py +61 -12
  13. relationalai/semantics/lqp/intrinsics.py +23 -0
  14. relationalai/semantics/lqp/model2lqp.py +13 -4
  15. relationalai/semantics/lqp/passes.py +4 -6
  16. relationalai/semantics/lqp/primitives.py +12 -1
  17. relationalai/semantics/{rel → lqp}/rewrite/__init__.py +6 -0
  18. relationalai/semantics/lqp/rewrite/extract_common.py +362 -0
  19. relationalai/semantics/metamodel/builtins.py +20 -2
  20. relationalai/semantics/metamodel/factory.py +3 -2
  21. relationalai/semantics/metamodel/rewrite/__init__.py +3 -9
  22. relationalai/semantics/reasoners/graph/core.py +273 -71
  23. relationalai/semantics/reasoners/optimization/solvers_dev.py +20 -1
  24. relationalai/semantics/reasoners/optimization/solvers_pb.py +24 -3
  25. relationalai/semantics/rel/builtins.py +5 -1
  26. relationalai/semantics/rel/compiler.py +7 -19
  27. relationalai/semantics/rel/executor.py +2 -2
  28. relationalai/semantics/rel/rel.py +6 -0
  29. relationalai/semantics/rel/rel_utils.py +8 -1
  30. relationalai/semantics/sql/compiler.py +122 -42
  31. relationalai/semantics/sql/executor/duck_db.py +28 -3
  32. relationalai/semantics/sql/rewrite/denormalize.py +4 -6
  33. relationalai/semantics/sql/rewrite/recursive_union.py +23 -3
  34. relationalai/semantics/sql/sql.py +27 -0
  35. relationalai/semantics/std/__init__.py +2 -1
  36. relationalai/semantics/std/datetime.py +4 -0
  37. relationalai/semantics/std/re.py +83 -0
  38. relationalai/semantics/std/strings.py +1 -1
  39. relationalai/tools/cli.py +11 -4
  40. relationalai/tools/cli_controls.py +445 -60
  41. relationalai/util/format.py +78 -1
  42. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/METADATA +7 -5
  43. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/RECORD +51 -50
  44. relationalai/semantics/metamodel/rewrite/gc_nodes.py +0 -58
  45. relationalai/semantics/metamodel/rewrite/list_types.py +0 -109
  46. relationalai/semantics/rel/rewrite/extract_common.py +0 -451
  47. /relationalai/semantics/{rel → lqp}/rewrite/cdc.py +0 -0
  48. /relationalai/semantics/{metamodel → lqp}/rewrite/extract_keys.py +0 -0
  49. /relationalai/semantics/{metamodel → lqp}/rewrite/fd_constraints.py +0 -0
  50. /relationalai/semantics/{rel → lqp}/rewrite/quantify_vars.py +0 -0
  51. /relationalai/semantics/{metamodel → lqp}/rewrite/splinter.py +0 -0
  52. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/WHEEL +0 -0
  53. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/entry_points.txt +0 -0
  54. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/licenses/LICENSE +0 -0
relationalai/semantics/lqp/rewrite/extract_common.py
@@ -0,0 +1,362 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import Optional
+ from relationalai.semantics.metamodel.rewrite import flatten
+ from relationalai.semantics.metamodel import ir, factory as f, helpers
+ from relationalai.semantics.metamodel.compiler import Pass, group_tasks
+ from relationalai.semantics.metamodel.util import OrderedSet, ordered_set
+ from relationalai.semantics.metamodel import dependency
+ from relationalai.semantics.metamodel import builtins
+
+ class ExtractCommon(Pass):
+     """
+     Pass to analyze Logical bodies and extract lookups into their own Logical when it
+     makes sense. The heuristic: if there are multiple lookups, and there are also
+     multiple sibling nested logicals that will eventually be extracted by Flatten,
+     then it makes sense to extract those lookups into their own "rule" and have the
+     original body simply look up this common rule.
+
+     From:
+         Logical
+             Logical
+                 lookup1
+                 lookup2
+                 Logical1 ...
+                 Logical2 ...
+     To:
+         Logical
+             Logical
+                 lookup1
+                 lookup2
+                 derive common
+             Logical
+                 lookup common
+                 Logical1 ...
+                 Logical2 ...
+     """
+
+     # The extraction plan heuristic is as follows:
+     #
+     # Given a set of binder tasks B and a set of extractable tasks E, we find:
+     # - A subset of common tasks C in B, and
+     # - A subset of exposed variables V output from tasks in C
+     # where:
+     # - The intersection of the common dependencies of all tasks in E is contained
+     #   in C (including transitive dependencies)
+     # - The union of the input variables of all tasks in E, intersected with the
+     #   output variables of tasks in C, is contained in V
+
+     #--------------------------------------------------
+     # Public API
+     #--------------------------------------------------
+     def rewrite(self, model: ir.Model, options: dict = {}) -> ir.Model:
+         # create the context
+         ctx = ExtractCommon.Context(model)
+
+         # rewrite the root
+         replacement = self.handle(model.root, ctx)
+
+         # the new root contains the extracted top level logicals and the rewritten root
+         if ctx.rewrite_ctx.top_level:
+             new_root = ir.Logical(model.root.engine, tuple(), tuple(ctx.rewrite_ctx.top_level + [replacement]))
+         else:
+             new_root = replacement
+
+         # create the new model, updating relations and root
+         return ir.Model(
+             model.engines,
+             OrderedSet.from_iterable(model.relations).update(ctx.rewrite_ctx.relations).frozen(),
+             model.types,
+             new_root
+         )
+
+     #--------------------------------------------------
+     # IR handlers
+     #--------------------------------------------------
+
+     class Context():
+         def __init__(self, model: ir.Model):
+             self.rewrite_ctx = helpers.RewriteContext()
+             self.info = dependency.analyze(model.root)
+
+     def handle(self, task: ir.Task, ctx: Context):
+         # currently we only extract if it's a sequence of Logicals, but we could in the
+         # future support other intermediate nodes
+         if isinstance(task, ir.Logical):
+             return self.handle_logical(task, ctx)
+         else:
+             return task
+
+     def handle_logical(self, task: ir.Logical, ctx: Context):
+         # Process the original body to find binders and extractables. The Flatten pass
+         # later will extract both composites and effects, so we group them together here
+         groups = group_tasks(task.body, {
+             "binders": helpers.BINDERS,
+             "composites_and_effects": helpers.COMPOSITES + helpers.EFFECTS,
+         })
+         binders = groups["binders"]
+         composites_and_effects = groups["composites_and_effects"]
+
+         # the new body of the rewritten task
+         body: OrderedSet[ir.Task] = ordered_set()
+
+         # quick check to see if it's worth doing more analysis; we only want to extract
+         # common binders if there are multiple, and there are also multiple composites
+         # that will be extracted by the flatten pass later (so that they can share the
+         # extracted logic).
+         plan = None
+         if len(binders) > 1 and composites_and_effects:
+             extractables = flatten.extractables(composites_and_effects)
+             # only makes sense to extract common if at least one nested composite will be
+             # extracted during Flatten
+             if extractables:
+                 # make a plan to extract common tasks from the logical
+                 plan = self._create_extraction_plan(binders, composites_and_effects, extractables, ctx)
+                 if plan and len(composites_and_effects) > 1:
+                     # the plan is worthwhile and there are multiple composites: extract
+                     # the common body and add the connection to the body
+                     exposed_vars = plan.exposed_vars.get_list()
+                     plan.common_reference = f.lookup(helpers.extract(task, plan.common_body, exposed_vars, ctx.rewrite_ctx, "common"), exposed_vars)
+                     # if we are not distributing the reference, add it to the main body
+                     if not plan.distribute_common_reference:
+                         body.add(plan.common_reference)
+
+         # if we have a plan and will distribute the common reference, keep track of
+         # variables still needed by the remaining tasks, as they need to be hoisted by
+         # the remaining composites that get the common reference
+         remaining_vars = None
+         if plan and plan.distribute_common_reference:
+             # add variables hoisted by this logical that are in the exposed vars, to
+             # make sure they are hoisted all the way through
+             remaining_vars = OrderedSet.from_iterable(helpers.hoisted_vars(task.hoisted)) & plan.exposed_vars
+             for child in task.body:
+                 if child in groups["other"] or child not in plan.remaining_body or child in composites_and_effects:
+                     continue
+                 remaining_vars.update(ctx.info.task_inputs(child))
+                 remaining_vars.update(ctx.info.task_outputs(child))
+             remaining_vars = remaining_vars & plan.exposed_vars
+
+         # if the plan was not used in one of the cases above, ignore it completely; we
+         # are neither extracting common nor distributing it around
+         if plan and not plan.distribute_common_reference and not len(composites_and_effects) > 1:
+             plan = None
+
+         # recursively handle children
+         for child in task.body:
+             # skip children that were extracted
+             if plan and child not in groups["other"] and child not in plan.remaining_body and child not in composites_and_effects:
+                 continue
+
+             # no plan, or the child is not a composite, so just add the handled child to the body
+             if not plan or child not in composites_and_effects:
+                 body.add(self.handle(child, ctx))
+                 continue
+
+             # there is a plan and the child is in composites, so...
+             replacement = self.handle(child, ctx)
+
+             # this child needs either extra local dependencies or the common reference
+             if child in plan.local_dependencies or plan.distribute_common_reference:
+                 # the new body will have maybe the common reference and the local deps
+                 replacement_body = ordered_set()
+
+                 hoisted = OrderedSet()
+                 if isinstance(replacement, ir.Logical):
+                     # if replacement is a logical, just keep the same hoisted vars
+                     hoisted.update(replacement.hoisted)
+                 else:
+                     # otherwise, we need to hoist the vars that are output from local deps
+                     # and input to the replacement task
+                     dep_outputs = OrderedSet()
+                     for d in plan.local_dependencies.get(child, ordered_set()):
+                         dep_outputs.update(ctx.info.task_outputs(d))
+                     hoisted.update(dep_outputs & ctx.info.task_inputs(replacement))
+
+                 if plan.distribute_common_reference:
+                     if len(composites_and_effects) == 1:
+                         # if there's a single composite, just insert the whole common body into it
+                         replacement_body.update(plan.common_body)
+                     else:
+                         # otherwise insert a clone of the reference to the extracted rule
+                         assert(plan.common_reference)
+                         replacement_body.add(plan.common_reference.clone())
+                     # add remaining vars to hoisted, making sure there are no duplicates (due to VarOrDefault)
+                     hoisted_vars = helpers.hoisted_vars(hoisted)
+                     if remaining_vars:
+                         hoisted = OrderedSet.from_iterable(filter(lambda v: v not in hoisted_vars, remaining_vars)) | hoisted
+
+                 if child in plan.local_dependencies:
+                     for local_dep in plan.local_dependencies[child]:
+                         replacement_body.add(local_dep.clone())
+
+                 if isinstance(replacement, ir.Logical):
+                     # if the replacement is a logical, we can just add to the body
+                     body.add(replacement.reconstruct(
+                         replacement.engine,
+                         tuple(hoisted.get_list()),
+                         tuple(replacement_body.update(replacement.body).get_list()),
+                         replacement.annotations
+                     ))
+                 else:
+                     # Otherwise, wrap the local dependencies in a Logical where the output
+                     # variables are hoisted, and keep the computed replacement.
+                     body.add(f.logical(replacement_body.get_list(), hoisted.get_list(), replacement.engine))
+                     body.add(replacement)
+             else:
+                 # child does not need extras in the body, just add it to the main body
+                 body.add(replacement)
+
+         return ir.Logical(task.engine, task.hoisted, tuple(body))
+
+     @dataclass
+     class ExtractionPlan():
+         # tasks to extract into the body of the common logical
+         common_body: OrderedSet[ir.Task]
+         # tasks that remain in the original body
+         remaining_body: OrderedSet[ir.Task]
+         # variables to be exposed by the common logical
+         exposed_vars: OrderedSet[ir.Var]
+         # map from each nested composite to the tasks in the common body that still need
+         # to be included in its body, because it uses variables not exposed by the
+         # common logical
+         local_dependencies: dict[ir.Task, OrderedSet[ir.Task]]
+         # whether the common reference should be distributed to composites
+         distribute_common_reference: bool
+         # a reference to the common connection created for this plan, if any
+         common_reference: Optional[ir.Lookup] = None
+
+
+     def _create_extraction_plan(self, binders: OrderedSet[ir.Task], composites: OrderedSet[ir.Task], extractables: list[ir.Task], ctx: Context):
+         """
+         Compute a plan to extract tasks in this frame that are common dependencies
+         across these composite tasks.
+         """
+         # If there are any pragma lookups, don't extract anything. Pragma lookups are
+         # designed to control execution order, and extracting them may affect their
+         # semantics.
+         for b in binders:
+             if isinstance(b, ir.Lookup) and builtins.is_pragma(b.relation):
+                 return None
+
+         # Compute the intersection of task dependencies
+         sample = composites.some()
+         deps = ctx.info.task_dependencies(sample)
+         if deps is None:
+             return None
+         # only get sibling dependencies
+         common_body = binders & deps
+
+         # For the other composites, remove their sibling dependencies so that we end up
+         # with the intersection of dependencies
+         for composite in composites:
+             if composite is sample:
+                 continue
+
+             # compute sibling dependencies
+             deps = ctx.info.task_dependencies(composite)
+             if deps:
+                 for task in common_body:
+                     if task not in deps:
+                         common_body.remove(task)
+
+         # Compute the union of input vars.
+         # Start with the output vars of the common body: we only want to expose vars
+         # that are output from the common body
+         body_output_vars = OrderedSet()
+         for child in common_body:
+             body_output_vars.update(ctx.info.task_outputs(child))
+
+         # Compute the union of input vars across all composites, intersected with the
+         # output vars of the common body
+         exposed_vars = OrderedSet.from_iterable(ctx.info.task_inputs(sample)) & body_output_vars
+         for composite in composites:
+             if composite is sample:
+                 continue
+             # compute common input vars
+             t_inputs = OrderedSet.from_iterable(ctx.info.task_inputs(composite))
+             exposed_vars.update(t_inputs & body_output_vars)
+
+         # no vars in common, not worth extracting
+         if not exposed_vars:
+             return None
+
+         for task in common_body:
+             local_deps = ctx.info.local_dependencies(task)
+             if local_deps:
+                 common_body.update(local_deps & binders)
+
+         # not useful to extract common tasks if there's a single one
+         if len(common_body) < 2:
+             return None
+
+         # check whether some variable used in the common body is needed by some binder
+         # that is not going to be extracted; in that case, we need to expose this
+         # variable from the common body
+         common_vars = ordered_set()
+         for task in common_body:
+             common_vars.update(ctx.info.task_outputs(task))
+         common_vars = common_vars - exposed_vars
+         for v in common_vars:
+             for binder in binders:
+                 if binder not in common_body and ctx.info.task_inputs(binder) and v in ctx.info.task_inputs(binder):
+                     exposed_vars.add(v)
+                     break
+
+         # check which of the original binders remain, and make sure their dependencies also stay
+         remaining = ordered_set()
+         for binder in binders:
+             if binder not in common_body:
+                 remaining.add(binder)
+                 deps = self._compute_local_dependencies(ctx, binders, binder, exposed_vars)
+                 if deps:
+                     remaining.update(deps)
+
+         # for each composite, check whether additional tasks are needed, because the
+         # composite depends on a task whose variables are not exposed
+         local_dependencies: dict[ir.Task, OrderedSet[ir.Task]] = dict()
+         for composite in composites:
+             local = self._compute_local_dependencies(ctx, binders, composite, exposed_vars)
+             if local:
+                 local_dependencies[composite] = local
+
+         # distribute the common reference only if all of the composites are extractable
+         # and there's nothing else remaining
+         distribute_common_reference = len(extractables) == len(composites) and not remaining
+
+         return ExtractCommon.ExtractionPlan(common_body, remaining, exposed_vars, local_dependencies, distribute_common_reference)
+
+
+     def _compute_local_dependencies(self, ctx: Context, binders: OrderedSet[ir.Task], composite: ir.Task, exposed_vars: OrderedSet[ir.Var]):
+         """
+         The tasks in common_body will be extracted into a logical that will expose the
+         exposed_vars. Compute which additional dependencies are needed specifically for
+         this composite, because it depends on some tasks that are extracted to
+         common_body but are not exposed through exposed_vars.
+         """
+
+         # working list of vars we still need to fulfill
+         inputs = ctx.info.task_inputs(composite)
+         if not inputs:
+             return None
+
+         # vars made available by exposed_vars plus the tasks added to the local body
+         vars_exposed = OrderedSet.from_iterable(exposed_vars)
+         vars_needed = (inputs - vars_exposed)
+         if not vars_needed:
+             return None
+
+         # this is a greedy algorithm that uses the first task in the common body that
+         # provides a needed variable; it may result in sub-optimal extraction, but
+         # should be correct
+         local_body = ordered_set()
+         while vars_needed:
+             v = vars_needed.pop()
+             for x in binders:
+                 if x not in local_body:
+                     # an x that is not yet in local_body can fulfill v
+                     x_outputs = ctx.info.task_outputs(x)
+                     if x_outputs and v in x_outputs:
+                         # add it to local_body and add its outputs to the exposed vars
+                         local_body.add(x)
+                         vars_exposed.update(x_outputs)
+                         # but add its inputs to the vars now needed
+                         inputs = ctx.info.task_inputs(x)
+                         if inputs:
+                             vars_needed.update(inputs - vars_exposed)
+         return local_body
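
For intuition, the greedy loop at the end of _compute_local_dependencies can be distilled into a few lines of plain Python. The sketch below is illustrative only, not package code: it models tasks as names with input/output variable sets instead of ir.Task nodes, and all names are made up for the example.

    def compute_local_deps(binders, task_inputs, task_outputs, composite, exposed_vars):
        # vars the composite consumes that the common logical does not expose
        needed = set(task_inputs.get(composite, ())) - set(exposed_vars)
        exposed = set(exposed_vars)
        local_body = set()
        while needed:
            v = needed.pop()
            for x in binders:
                if x not in local_body and v in task_outputs.get(x, ()):
                    local_body.add(x)                    # clone this producer locally
                    exposed |= set(task_outputs[x])      # its outputs are now available
                    needed |= set(task_inputs.get(x, ())) - exposed  # chase its inputs
        return local_body

    # toy example: lookup "b" derives y from x; the composite consumes y,
    # but only x is exposed by the common logical
    task_inputs = {"composite": {"y"}, "b": {"x"}}
    task_outputs = {"b": {"y"}}
    print(compute_local_deps(["b"], task_inputs, task_outputs, "composite", {"x"}))
    # -> {'b'}: lookup "b" must be kept in the composite's local body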
relationalai/semantics/metamodel/builtins.py
@@ -391,7 +391,7 @@ erfinv = f.relation(

  # Strings
  concat = f.relation("concat", [f.input_field("a", types.String), f.input_field("b", types.String), f.field("c", types.String)])
- num_chars = f.relation("num_chars", [f.input_field("a", types.String), f.field("b", types.Int128)])
+ num_chars = f.relation("num_chars", [f.input_field("a", types.String), f.field("b", types.Int64)])
  starts_with = f.relation("starts_with", [f.input_field("a", types.String), f.input_field("b", types.String)])
  ends_with = f.relation("ends_with", [f.input_field("a", types.String), f.input_field("b", types.String)])
  contains = f.relation("contains", [f.input_field("a", types.String), f.input_field("b", types.String)])
@@ -406,7 +406,13 @@ replace = f.relation("replace", [f.input_field("a", types.String), f.input_field
  split = f.relation("split", [f.input_field("a", types.String), f.input_field("b", types.String), f.field("c", types.Int64), f.field("d", types.String)])
  # should be a separate builtin. SQL emitter compiles it differently
  split_part = f.relation("split_part", [f.input_field("a", types.String), f.input_field("b", types.String), f.field("c", types.Int64), f.field("d", types.String)])
+
+ # regex
  regex_match = f.relation("regex_match", [f.input_field("a", types.String), f.input_field("b", types.String)])
+ regex_match_all = f.relation("regex_match_all", [f.input_field("a", types.String), f.input_field("b", types.String), f.input_field("c", types.Int64), f.field("d", types.String)])
+ capture_group_by_index = f.relation("capture_group_by_index", [f.input_field("a", types.String), f.input_field("b", types.String), f.input_field("c", types.Int64), f.input_field("d", types.Int64), f.field("e", types.String)])
+ capture_group_by_name = f.relation("capture_group_by_name", [f.input_field("a", types.String), f.input_field("b", types.String), f.input_field("c", types.Int64), f.input_field("d", types.String), f.field("e", types.String)])
+ escape_regex_metachars = f.relation("escape_regex_metachars", [f.input_field("a", types.String), f.field("b", types.String)])

  # Dates
  date_format = f.relation("date_format", [f.input_field("a", types.Date), f.input_field("b", types.String), f.field("c", types.String)])
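
The new builtins distinguish capture groups addressed by index from those addressed by name, plus metacharacter escaping. For readers unfamiliar with that distinction, here is what the analogous operations look like in Python's standard re module (illustrative only; this is not the package's API):

    import re

    # group by index vs. by name, plus metacharacter escaping, via stdlib re
    m = re.search(r"(?P<year>\d{4})-(\d{2})", "released 2024-07")
    assert m is not None
    print(m.group(1))           # '2024' -- capture group by index
    print(m.group("year"))      # '2024' -- capture group by name
    print(re.escape("a.b*c"))   # 'a\.b\*c' -- escape regex metacharacters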
@@ -422,6 +428,7 @@ date_add = f.relation("date_add", [f.input_field("a", types.Date), f.input_field
  dates_period_days = f.relation("dates_period_days", [f.input_field("a", types.Date), f.input_field("b", types.Date), f.field("c", types.Int64)])
  datetimes_period_milliseconds = f.relation("datetimes_period_milliseconds", [f.input_field("a", types.DateTime), f.input_field("b", types.DateTime), f.field("c", types.Int64)])
  date_subtract = f.relation("date_subtract", [f.input_field("a", types.Date), f.input_field("b", types.Int64), f.field("c", types.Date)])
+ datetime_now = f.relation("datetime_now", [f.field("a", types.DateTime)])
  datetime_add = f.relation("datetime_add", [f.input_field("a", types.DateTime), f.input_field("b", types.Int64), f.field("c", types.DateTime)])
  datetime_subtract = f.relation("datetime_subtract", [f.input_field("a", types.DateTime), f.input_field("b", types.Int64), f.field("c", types.DateTime)])
  datetime_year = f.relation("datetime_year", [f.input_field("a", types.DateTime), f.input_field("b", types.String), f.field("c", types.Int64)])
@@ -489,6 +496,17 @@ function = f.relation("function", [f.input_field("code", types.Symbol)])
  function_checked_annotation = f.annotation(function, [f.lit("checked")])
  function_annotation = f.annotation(function, [])

+ # Indicates this relation should be tracked in telemetry. Only supported for Relationships.
+ # `RAI_BackIR.with_relation_tracking` produces log messages at the start and end of each
+ # SCC evaluation, if any declarations bear the `track` annotation.
+ track = f.relation("track", [
+     # BackIR evaluation expects 2 parameters on the track annotation: the tracking
+     # library name and the tracking relation name, which appear as log metadata fields.
+     f.input_field("library", types.Symbol),
+     f.input_field("relation", types.Symbol)
+ ])
+ track_annotation = f.annotation(track, [])
+
  # All ir nodes marked by this annotation will be removed from the final metamodel before compilation.
  # Specifically, this happens in the `Flatten` pass, when the rewrites for `require` happen
  discharged = f.relation("discharged", [])
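
Following the pattern of function_checked_annotation just above, an annotation instance carrying the two Symbol parameters would presumably be built with literal arguments. A hypothetical sketch, not taken from the package:

    # Hypothetical: mirrors f.annotation(function, [f.lit("checked")]) above;
    # the actual call sites for the track annotation live elsewhere in the package.
    my_track_annotation = f.annotation(track, [f.lit("my_library"), f.lit("my_relation")])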
@@ -665,7 +683,7 @@ def _compute_builtin_overloads() -> list[ir.Relation]:
      return overloads

  # manually maintain the list of relations that are actually annotations
- builtin_annotations = [external, export, concept_population, from_cdc, from_cast]
+ builtin_annotations = [external, export, concept_population, from_cdc, from_cast, track]
  builtin_annotations_by_name = dict((r.name, r) for r in builtin_annotations)

  builtin_relations = _compute_builtin_relations()
relationalai/semantics/metamodel/factory.py
@@ -185,10 +185,11 @@ def lit(value: Any) -> ir.Value:
          return ir.Literal(types.Bool, value)
      elif isinstance(value, decimal.Decimal):
          return ir.Literal(types.Decimal, value)
-     elif isinstance(value, datetime.date):
-         return ir.Literal(types.Date, value)
+     # datetime.datetime is a subclass of datetime.date, so check it first
      elif isinstance(value, datetime.datetime):
          return ir.Literal(types.DateTime, value)
+     elif isinstance(value, datetime.date):
+         return ir.Literal(types.Date, value)
      elif isinstance(value, list):
          return tuple([lit(v) for v in value])
      else:
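
The reordering above fixes a classic isinstance pitfall: because datetime.datetime is a subclass of datetime.date, the date branch previously matched datetime values as well and tagged them as Date literals. A quick standalone demonstration of the underlying behavior:

    import datetime

    now = datetime.datetime(2024, 7, 1, 12, 30)
    print(isinstance(now, datetime.date))      # True -- the date check matches datetimes too
    print(isinstance(now, datetime.datetime))  # True -- so the datetime check must come first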
relationalai/semantics/metamodel/rewrite/__init__.py
@@ -1,12 +1,6 @@
- from .splinter import Splinter
- from .list_types import RewriteListTypes
- from .gc_nodes import GarbageCollectNodes
- from .flatten import Flatten
+ from .discharge_constraints import DischargeConstraints
  from .dnf_union_splitter import DNFUnionSplitter
- from .extract_keys import ExtractKeys
  from .extract_nested_logicals import ExtractNestedLogicals
- from .fd_constraints import FDConstraints
- from .discharge_constraints import DischargeConstraints
+ from .flatten import Flatten

- __all__ = ["Splinter", "RewriteListTypes", "GarbageCollectNodes", "Flatten", "DNFUnionSplitter", "ExtractKeys",
-            "ExtractNestedLogicals", "FDConstraints", "DischargeConstraints"]
+ __all__ = ["DischargeConstraints", "DNFUnionSplitter", "ExtractNestedLogicals", "Flatten"]