relationalai 0.11.3__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. relationalai/clients/config.py +7 -0
  2. relationalai/clients/direct_access_client.py +113 -0
  3. relationalai/clients/snowflake.py +41 -107
  4. relationalai/clients/use_index_poller.py +349 -188
  5. relationalai/early_access/dsl/bindings/csv.py +2 -2
  6. relationalai/early_access/metamodel/rewrite/__init__.py +5 -3
  7. relationalai/early_access/rel/rewrite/__init__.py +1 -1
  8. relationalai/errors.py +24 -3
  9. relationalai/semantics/internal/annotations.py +1 -0
  10. relationalai/semantics/internal/internal.py +22 -4
  11. relationalai/semantics/lqp/builtins.py +1 -0
  12. relationalai/semantics/lqp/executor.py +61 -12
  13. relationalai/semantics/lqp/intrinsics.py +23 -0
  14. relationalai/semantics/lqp/model2lqp.py +13 -4
  15. relationalai/semantics/lqp/passes.py +4 -6
  16. relationalai/semantics/lqp/primitives.py +12 -1
  17. relationalai/semantics/{rel → lqp}/rewrite/__init__.py +6 -0
  18. relationalai/semantics/lqp/rewrite/extract_common.py +362 -0
  19. relationalai/semantics/metamodel/builtins.py +20 -2
  20. relationalai/semantics/metamodel/factory.py +3 -2
  21. relationalai/semantics/metamodel/rewrite/__init__.py +3 -9
  22. relationalai/semantics/reasoners/graph/core.py +273 -71
  23. relationalai/semantics/reasoners/optimization/solvers_dev.py +20 -1
  24. relationalai/semantics/reasoners/optimization/solvers_pb.py +24 -3
  25. relationalai/semantics/rel/builtins.py +5 -1
  26. relationalai/semantics/rel/compiler.py +7 -19
  27. relationalai/semantics/rel/executor.py +2 -2
  28. relationalai/semantics/rel/rel.py +6 -0
  29. relationalai/semantics/rel/rel_utils.py +8 -1
  30. relationalai/semantics/sql/compiler.py +122 -42
  31. relationalai/semantics/sql/executor/duck_db.py +28 -3
  32. relationalai/semantics/sql/rewrite/denormalize.py +4 -6
  33. relationalai/semantics/sql/rewrite/recursive_union.py +23 -3
  34. relationalai/semantics/sql/sql.py +27 -0
  35. relationalai/semantics/std/__init__.py +2 -1
  36. relationalai/semantics/std/datetime.py +4 -0
  37. relationalai/semantics/std/re.py +83 -0
  38. relationalai/semantics/std/strings.py +1 -1
  39. relationalai/tools/cli.py +11 -4
  40. relationalai/tools/cli_controls.py +445 -60
  41. relationalai/util/format.py +78 -1
  42. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/METADATA +7 -5
  43. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/RECORD +51 -50
  44. relationalai/semantics/metamodel/rewrite/gc_nodes.py +0 -58
  45. relationalai/semantics/metamodel/rewrite/list_types.py +0 -109
  46. relationalai/semantics/rel/rewrite/extract_common.py +0 -451
  47. /relationalai/semantics/{rel → lqp}/rewrite/cdc.py +0 -0
  48. /relationalai/semantics/{metamodel → lqp}/rewrite/extract_keys.py +0 -0
  49. /relationalai/semantics/{metamodel → lqp}/rewrite/fd_constraints.py +0 -0
  50. /relationalai/semantics/{rel → lqp}/rewrite/quantify_vars.py +0 -0
  51. /relationalai/semantics/{metamodel → lqp}/rewrite/splinter.py +0 -0
  52. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/WHEEL +0 -0
  53. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/entry_points.txt +0 -0
  54. {relationalai-0.11.3.dist-info → relationalai-0.12.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,5 +1,5 @@
1
1
  from io import StringIO
2
- from typing import Optional
2
+ from typing import Optional, Hashable
3
3
 
4
4
  import numpy as np
5
5
  import pandas as pd
@@ -76,7 +76,7 @@ class BindableCsvColumn(BindableColumn, b.Relationship):
76
76
 
77
77
 
78
78
  class CsvTable(AbstractBindableTable[BindableCsvColumn]):
79
- _basic_type_schema: dict[str, str]
79
+ _basic_type_schema: dict[Hashable, str]
80
80
  _csv_data: list[pd.DataFrame]
81
81
  _num_rows: int
82
82
 
@@ -1,5 +1,7 @@
1
- from relationalai.semantics.metamodel.rewrite import Splinter, RewriteListTypes, GarbageCollectNodes, Flatten, \
2
- DNFUnionSplitter, ExtractKeys, ExtractNestedLogicals, FDConstraints, flatten
1
+ from relationalai.semantics.metamodel.rewrite import Flatten, \
2
+ DNFUnionSplitter, ExtractNestedLogicals, flatten
3
+ from relationalai.semantics.lqp.rewrite import Splinter, \
4
+ ExtractKeys, FDConstraints
3
5
 
4
- __all__ = ["Splinter", "RewriteListTypes", "GarbageCollectNodes", "Flatten", "DNFUnionSplitter", "ExtractKeys",
6
+ __all__ = ["Splinter", "Flatten", "DNFUnionSplitter", "ExtractKeys",
5
7
  "ExtractNestedLogicals", "FDConstraints", "flatten"]
@@ -1,4 +1,4 @@
1
- from relationalai.semantics.rel.rewrite import CDC, ExtractCommon, QuantifyVars
1
+ from relationalai.semantics.lqp.rewrite import CDC, ExtractCommon, QuantifyVars
2
2
 
3
3
  __all__ = [
4
4
  "CDC",
relationalai/errors.py CHANGED
@@ -2397,17 +2397,18 @@ class UnsupportedColumnTypesWarning(RAIWarning):
2397
2397
  """)
2398
2398
 
2399
2399
  class QueryTimeoutExceededException(RAIException):
2400
- def __init__(self, timeout_mins: int, config_file_path: str | None = None):
2400
+ def __init__(self, timeout_mins: int, query_id: str | None = None, config_file_path: str | None = None):
2401
2401
  self.timeout_mins = timeout_mins
2402
- self.message = f"Query execution time exceeded the specified timeout of {timeout_mins} minutes."
2403
2402
  self.name = "Query Timeout Exceeded"
2403
+ self.message = f"Query execution time exceeded the specified timeout of {self.timeout_mins} minutes."
2404
+ self.query_id = query_id or ""
2404
2405
  self.config_file_path = config_file_path or ""
2405
2406
  self.content = self.format_message()
2406
2407
  super().__init__(self.message, self.name, self.content)
2407
2408
 
2408
2409
  def format_message(self):
2409
2410
  return textwrap.dedent(f"""
2410
- {self.message}
2411
+ Query execution time exceeded the specified timeout of {self.timeout_mins} minutes{f' for query with ID: {self.query_id}' if self.query_id else ''}.
2411
2412
 
2412
2413
  Consider increasing the 'query_timeout_mins' parameter in your configuration file{f' (stored in {self.config_file_path})' if self.config_file_path else ''} to allow more time for query execution.
2413
2414
  """)
@@ -2432,3 +2433,23 @@ class AzureUnsupportedQueryTimeoutException(RAIException):
2432
2433
  Please remove the 'query_timeout_mins' from your configuration file{f' (stored in {self.config_file_path})' if self.config_file_path else ''} when running on platform Azure.
2433
2434
  """)
2434
2435
 
2436
+ class AzureLegacyDependencyMissingException(RAIException):
2437
+ def __init__(self):
2438
+ self.message = "The Azure platform requires the 'legacy' extras to be installed."
2439
+ self.name = "Azure Legacy Dependency Missing"
2440
+ self.content = self.format_message()
2441
+ super().__init__(self.message, self.name, self.content)
2442
+
2443
+ def format_message(self):
2444
+ return textwrap.dedent("""
2445
+ The Azure platform requires the 'rai-sdk' package, which is not installed.
2446
+
2447
+ To use the Azure platform, please install the legacy extras:
2448
+
2449
+ pip install relationalai[legacy]
2450
+
2451
+ Or if upgrading an existing installation:
2452
+
2453
+ pip install --upgrade relationalai[legacy]
2454
+ """)
2455
+
@@ -5,3 +5,4 @@ external = Relationship.builtins["external"]
5
5
  concept_population = Relationship.builtins["concept_population"]
6
6
  function = Relationship.builtins["function"]
7
7
  from_cdc = Relationship.builtins["from_cdc"]
8
+ track = Relationship.builtins["track"]
@@ -514,11 +514,13 @@ class Producer:
514
514
  #--------------------------------------------------
515
515
 
516
516
  def in_(self, values:list[Any]|Fragment) -> Expression:
517
+ columns = None
517
518
  if isinstance(values, Fragment):
518
519
  return self == values
519
520
  if not isinstance(values[0], tuple):
520
521
  values = [tuple([v]) for v in values]
521
- d = data(values)
522
+ columns = [f"v{i}" for i in range(len(values[0]))]
523
+ d = data(values, columns)
522
524
  return self == d[0]
523
525
 
524
526
  #--------------------------------------------------
@@ -907,9 +909,9 @@ class Concept(Producer):
907
909
  if python_types_to_concepts.get(v):
908
910
  v = python_types_to_concepts[v]
909
911
  if isinstance(v, Concept):
910
- setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._name}}}", short_name=k, model=self._model))
912
+ setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._name}}}", parent=self, short_name=k, model=self._model))
911
913
  elif isinstance(v, type) and issubclass(v, self._model.Enum): #type: ignore
912
- setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._concept._name}}}", short_name=k, model=self._model))
914
+ setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._concept._name}}}", parent=self, short_name=k, model=self._model))
913
915
  elif isinstance(v, Relationship):
914
916
  self._validate_identifier_relationship(v)
915
917
  setattr(self, k, v)
@@ -1189,6 +1191,7 @@ def is_decimal(concept: Concept) -> bool:
1189
1191
  Concept.builtins["Int"] = Concept.builtins["Int128"]
1190
1192
  Concept.builtins["Integer"] = Concept.builtins["Int128"]
1191
1193
 
1194
+ _np_datetime = np.dtype('datetime64[ns]')
1192
1195
  python_types_to_concepts : dict[Any, Concept] = {
1193
1196
  int: Concept.builtins["Int128"],
1194
1197
  float: Concept.builtins["Float"],
@@ -1213,6 +1216,7 @@ python_types_to_concepts : dict[Any, Concept] = {
1213
1216
  np.dtype('float32'): Concept.builtins["Float"],
1214
1217
  np.dtype('bool'): Concept.builtins["Bool"],
1215
1218
  np.dtype('object'): Concept.builtins["String"], # Often strings are stored as object dtype
1219
+ _np_datetime: Concept.builtins["DateTime"],
1216
1220
 
1217
1221
  # Pandas extension dtypes
1218
1222
  pd.Int64Dtype(): Concept.builtins["Int128"],
@@ -1655,7 +1659,9 @@ class Expression(Producer):
1655
1659
  raise ValueError(f"Argument index should be positive, got {idx}")
1656
1660
  if len(self._params) <= idx:
1657
1661
  raise ValueError(f"Expression '{self.__str__()}' has only {len(self._params)} arguments")
1658
- return ArgumentRef(self, self._params[idx])
1662
+ param = self._params[idx]
1663
+ # if param is an Expression then refer the last param of this expression
1664
+ return ArgumentRef(self, param._params[-1] if isinstance(param, Expression) else param)
1659
1665
 
1660
1666
  def __getattr__(self, name: str):
1661
1667
  last = self._params[-1]
@@ -2090,8 +2096,20 @@ class DataColumn(Producer):
2090
2096
  self._data = data
2091
2097
  self._type = _type
2092
2098
  self._name = name if isinstance(name, str) else f"v{name}"
2099
+ if pd.api.types.is_datetime64_any_dtype(_type):
2100
+ _type = _np_datetime
2101
+ # dates are objects in pandas
2102
+ elif pd.api.types.is_object_dtype(_type) and self._is_date_column():
2103
+ _type = date
2093
2104
  self._ref = python_types_to_concepts[_type].ref(self._name)
2094
2105
 
2106
+ def _is_date_column(self) -> bool:
2107
+ sample = self._data._data[self._name].dropna()
2108
+ if sample.empty:
2109
+ return False
2110
+ sample_value = sample.iloc[0]
2111
+ return isinstance(sample_value, date) and not isinstance(sample_value, datetime)
2112
+
2095
2113
  def __str__(self):
2096
2114
  return f"DataColumn({self._name}, {self._type})"
2097
2115
 
@@ -11,4 +11,5 @@ adhoc_annotation = f.annotation(adhoc, [])
11
11
  annotations_to_emit = FrozenOrderedSet([
12
12
  adhoc.name,
13
13
  builtins.function.name,
14
+ builtins.track.name,
14
15
  ])
@@ -12,6 +12,7 @@ from relationalai import debugging
12
12
  from relationalai.semantics.lqp import result_helpers
13
13
  from relationalai.semantics.metamodel import ir, factory as f, executor as e
14
14
  from relationalai.semantics.lqp.compiler import Compiler
15
+ from relationalai.semantics.lqp.intrinsics import mk_intrinsic_datetime_now
15
16
  from relationalai.semantics.lqp.types import lqp_type_to_sql
16
17
  from lqp import print as lqp_print, ir as lqp_ir
17
18
  from lqp.parser import construct_configure
@@ -258,11 +259,47 @@ class LQPExecutor(e.Executor):
258
259
 
259
260
  return ", ".join(fields)
260
261
 
262
+ def _construct_configure(self):
263
+ config_dict = {}
264
+ # Only set the IVM flag if there is a value in `config`. Otherwise, let
265
+ # `construct_configure` set the default value.
266
+ ivm_flag = self.config.get('reasoner.rule.incremental_maintenance', None)
267
+ if ivm_flag:
268
+ config_dict['ivm.maintenance_level'] = lqp_ir.Value(value=ivm_flag, meta=None)
269
+ return construct_configure(config_dict, None)
270
+
271
+ def _compile_intrinsics(self) -> lqp_ir.Epoch:
272
+ """Construct an epoch that defines a number of built-in definitions used by the
273
+ emitter."""
274
+ with debugging.span("compile_intrinsics") as span:
275
+ debug_info = lqp_ir.DebugInfo(id_to_orig_name={}, meta=None)
276
+ intrinsics_fragment = lqp_ir.Fragment(
277
+ id = lqp_ir.FragmentId(id=b"__pyrel_lqp_intrinsics", meta=None),
278
+ declarations = [
279
+ mk_intrinsic_datetime_now(),
280
+ ],
281
+ debug_info = debug_info,
282
+ meta = None,
283
+ )
284
+
285
+ span["compile_type"] = "intrinsics"
286
+ span["lqp"] = lqp_print.to_string(intrinsics_fragment, {"print_names": True, "print_debug": False, "print_csv_filename": False})
287
+
288
+ return lqp_ir.Epoch(
289
+ writes=[
290
+ lqp_ir.Write(write_type=lqp_ir.Define(fragment=intrinsics_fragment, meta=None), meta=None)
291
+ ],
292
+ meta=None,
293
+ )
294
+
261
295
  def compile_lqp(self, model: ir.Model, task: ir.Task):
296
+ configure = self._construct_configure()
297
+
262
298
  model_txn = None
263
299
  if self._last_model != model:
264
300
  with debugging.span("compile", metamodel=model) as install_span:
265
301
  _, model_txn = self.compiler.compile(model, {"fragment_id": b"model"})
302
+ model_txn = txn_with_configure(model_txn, configure)
266
303
  install_span["compile_type"] = "model"
267
304
  install_span["lqp"] = lqp_print.to_string(model_txn, {"print_names": True, "print_debug": False, "print_csv_filename": False})
268
305
  self._last_model = model
@@ -275,23 +312,26 @@ class LQPExecutor(e.Executor):
275
312
  }
276
313
  result, final_model = self.compiler.compile_inner(query, options)
277
314
  export_info, query_txn = result
315
+ query_txn = txn_with_configure(query_txn, configure)
278
316
  compile_span["compile_type"] = "query"
279
317
  compile_span["lqp"] = lqp_print.to_string(query_txn, {"print_names": True, "print_debug": False, "print_csv_filename": False})
280
318
 
281
- txn = query_txn
319
+ # Merge the epochs into a single transaction. Long term the query bits should all
320
+ # go into a WhatIf action and the intrinsics could be fused with either of them. But
321
+ # for now we just use separate epochs.
322
+ epochs = []
323
+
324
+ epochs.append(self._compile_intrinsics())
325
+
282
326
  if model_txn is not None:
283
- # Merge the two LQP transactions into one. Long term the query bits should all
284
- # go into a WhatIf action. But for now we just use two separate epochs.
285
- model_epoch = model_txn.epochs[0]
286
- query_epoch = query_txn.epochs[0]
287
- txn = lqp_ir.Transaction(
288
- epochs=[model_epoch, query_epoch],
289
- configure=construct_configure({}, None),
290
- meta=None,
291
- )
327
+ epochs.append(model_txn.epochs[0])
292
328
 
293
- # Revalidate now that we've joined two epochs
294
- validate_lqp(txn)
329
+ epochs.append(query_txn.epochs[0])
330
+
331
+ txn = lqp_ir.Transaction(epochs=epochs, configure=configure, meta=None)
332
+
333
+ # Revalidate now that we've joined all the epochs.
334
+ validate_lqp(txn)
295
335
 
296
336
  txn_proto = convert_transaction(txn)
297
337
  # TODO (azreika): Should export_info be encoded as part of the txn_proto? [RAI-40312]
@@ -352,3 +392,12 @@ class LQPExecutor(e.Executor):
352
392
  # If processing the results failed, revert to the previous model.
353
393
  self._last_model = previous_model
354
394
  raise e
395
+
396
+ def txn_with_configure(txn: lqp_ir.Transaction, configure: lqp_ir.Configure) -> lqp_ir.Transaction:
397
+ """ Return a new transaction with the given configure. If the transaction already has
398
+ a configure, it is replaced. """
399
+ return lqp_ir.Transaction(
400
+ epochs=txn.epochs,
401
+ configure=configure,
402
+ meta=txn.meta,
403
+ )
@@ -0,0 +1,23 @@
1
+ from datetime import datetime, timezone
2
+
3
+ from relationalai.semantics.lqp import ir as lqp
4
+ from relationalai.semantics.lqp.constructors import mk_abstraction, mk_value, mk_var, mk_type, mk_primitive
5
+ from relationalai.semantics.lqp.utils import lqp_hash
6
+
7
+ def mk_intrinsic_datetime_now() -> lqp.Def:
8
+ """Constructs a definition of the current datetime."""
9
+ id = lqp_hash("__pyrel_lqp_intrinsic_datetime_now")
10
+ out = mk_var("out")
11
+ out_type = mk_type(lqp.TypeName.DATETIME)
12
+ now = mk_value(lqp.DateTimeValue(value=datetime.now(timezone.utc), meta=None))
13
+ datetime_now = mk_abstraction(
14
+ [(out, out_type)],
15
+ mk_primitive("rel_primitive_eq", [out, now]),
16
+ )
17
+
18
+ return lqp.Def(
19
+ name = lqp.RelationId(id=id, meta=None),
20
+ body = datetime_now,
21
+ attrs = [],
22
+ meta = None,
23
+ )
@@ -192,12 +192,21 @@ def _translate_effect(ctx: TranslationCtx, effect: Union[ir.Output, ir.Update],
192
192
  elif isinstance(effect, ir.Output):
193
193
  ctx.output_ids.append((rel_id, def_name))
194
194
 
195
+ # First we collect annotations on the effect itself, e.g. from something like
196
+ # `select(...).annotate(...)`.
197
+ annotations = effect.annotations
198
+ if isinstance(effect, ir.Update):
199
+ # Then we translate annotations on the relation itself, e.g.
200
+ # ```
201
+ # Bar.foo = model.Relationship(...)
202
+ # Bar.foo.annotate(...)
203
+ # ```
204
+ annotations = annotations | effect.relation.annotations
205
+
195
206
  return lqp.Def(
196
207
  name = rel_id,
197
208
  body = mk_abstraction(projection, new_body),
198
- # TODO this only covers the annotations on the effect itself. Annotations on the
199
- # relation are not included yet.
200
- attrs = _translate_annotations(effect.annotations),
209
+ attrs = _translate_annotations(annotations),
201
210
  meta = None,
202
211
  )
203
212
 
@@ -697,4 +706,4 @@ def _translate_join(ctx: TranslationCtx, task: ir.Lookup) -> lqp.Formula:
697
706
 
698
707
  output_term = _translate_term(ctx, target)[0]
699
708
 
700
- return lqp.Reduce(meta=None, op=op, body=body, terms=[output_term])
709
+ return lqp.Reduce(meta=None, op=op, body=body, terms=[output_term])
@@ -2,13 +2,12 @@ from relationalai.semantics.metamodel.compiler import Pass
2
2
  from relationalai.semantics.metamodel import ir, builtins as rel_builtins, factory as f, visitor
3
3
  from relationalai.semantics.metamodel.typer import Checker, InferTypes, typer
4
4
  from relationalai.semantics.metamodel import helpers, types
5
- from relationalai.semantics.metamodel.rewrite import (Splinter, ExtractNestedLogicals, ExtractKeys, FDConstraints,
6
- DNFUnionSplitter, DischargeConstraints)
7
5
  from relationalai.semantics.metamodel.util import FrozenOrderedSet
8
6
 
9
7
  from relationalai.semantics.metamodel.rewrite import Flatten
10
- # TODO: Move this into metamodel.rewrite
11
- from relationalai.semantics.rel.rewrite import QuantifyVars, CDC
8
+
9
+ from ..metamodel.rewrite import DischargeConstraints, DNFUnionSplitter, ExtractNestedLogicals
10
+ from .rewrite import CDC, ExtractCommon, ExtractKeys, FDConstraints, QuantifyVars, Splinter
12
11
 
13
12
  from relationalai.semantics.lqp.utils import output_names
14
13
 
@@ -25,8 +24,7 @@ def lqp_passes() -> list[Pass]:
25
24
  InferTypes(),
26
25
  DNFUnionSplitter(),
27
26
  ExtractKeys(),
28
- # Broken
29
- # ExtractCommon(),
27
+ ExtractCommon(),
30
28
  Flatten(),
31
29
  Splinter(), # Splits multi-headed rules into multiple rules
32
30
  QuantifyVars(), # Adds missing existentials
@@ -1,7 +1,7 @@
1
1
  from relationalai.semantics.metamodel.types import digits_to_bits
2
2
  from relationalai.semantics.lqp import ir as lqp
3
3
  from relationalai.semantics.lqp.types import is_numeric
4
- from relationalai.semantics.lqp.utils import UniqueNames
4
+ from relationalai.semantics.lqp.utils import UniqueNames, lqp_hash
5
5
  from relationalai.semantics.lqp.constructors import mk_primitive, mk_specialized_value, mk_type, mk_value, mk_var
6
6
 
7
7
  rel_to_lqp = {
@@ -62,6 +62,7 @@ rel_to_lqp = {
62
62
  "date_add": "rel_primitive_typed_add_date_period",
63
63
  "date_subtract": "rel_primitive_typed_subtract_date_period",
64
64
  "dates_period_days": "rel_primitive_date_days_between",
65
+ "datetime_now": "__pyrel_lqp_intrinsic_datetime_now",
65
66
  "datetime_add": "rel_primitive_typed_add_datetime_period",
66
67
  "datetime_subtract": "rel_primitive_typed_subtract_datetime_period",
67
68
  "datetime_year": "rel_primitive_datetime_year",
@@ -175,6 +176,16 @@ def build_primitive(
175
176
  terms, term_types = _reorder_primitive_terms(lqp_name, terms, term_types)
176
177
  _assert_primitive_terms(lqp_name, terms, term_types)
177
178
 
179
+ # Handle intrinsics. To callers of `build_primitive` the distinction between intrinsic
180
+ # and primitive doesn't matter, so we don't want to burden them with that detail.
181
+ # Intrinsics are built-in definitions added by the LQP emitter, that user logic can just
182
+ # refer to.
183
+ if lqp_name == "__pyrel_lqp_intrinsic_datetime_now":
184
+ id = lqp.RelationId(id=lqp_hash(lqp_name), meta=None)
185
+ assert len(terms) == 1
186
+ assert isinstance(terms[0], lqp.Term)
187
+ return lqp.Atom(name=id, terms=[terms[0]], meta=None)
188
+
178
189
  return mk_primitive(lqp_name, terms)
179
190
 
180
191
  def relname_to_lqp_name(name: str) -> str:
@@ -1,9 +1,15 @@
1
1
  from .cdc import CDC
2
2
  from .extract_common import ExtractCommon
3
+ from .extract_keys import ExtractKeys
4
+ from .fd_constraints import FDConstraints
3
5
  from .quantify_vars import QuantifyVars
6
+ from .splinter import Splinter
4
7
 
5
8
  __all__ = [
6
9
  "CDC",
7
10
  "ExtractCommon",
11
+ "ExtractKeys",
12
+ "FDConstraints",
8
13
  "QuantifyVars",
14
+ "Splinter",
9
15
  ]