relationalai 0.11.2__py3-none-any.whl → 0.11.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- relationalai/clients/snowflake.py +44 -15
- relationalai/clients/types.py +1 -0
- relationalai/clients/use_index_poller.py +446 -178
- relationalai/early_access/builder/std/__init__.py +1 -1
- relationalai/early_access/dsl/bindings/csv.py +4 -4
- relationalai/semantics/internal/internal.py +22 -4
- relationalai/semantics/lqp/executor.py +69 -18
- relationalai/semantics/lqp/intrinsics.py +23 -0
- relationalai/semantics/lqp/model2lqp.py +16 -6
- relationalai/semantics/lqp/passes.py +3 -4
- relationalai/semantics/lqp/primitives.py +38 -14
- relationalai/semantics/metamodel/builtins.py +152 -11
- relationalai/semantics/metamodel/factory.py +3 -2
- relationalai/semantics/metamodel/helpers.py +78 -2
- relationalai/semantics/reasoners/graph/core.py +343 -40
- relationalai/semantics/reasoners/optimization/solvers_dev.py +20 -1
- relationalai/semantics/reasoners/optimization/solvers_pb.py +24 -3
- relationalai/semantics/rel/compiler.py +5 -17
- relationalai/semantics/rel/executor.py +2 -2
- relationalai/semantics/rel/rel.py +6 -0
- relationalai/semantics/rel/rel_utils.py +37 -1
- relationalai/semantics/rel/rewrite/extract_common.py +153 -242
- relationalai/semantics/sql/compiler.py +540 -202
- relationalai/semantics/sql/executor/duck_db.py +21 -0
- relationalai/semantics/sql/executor/result_helpers.py +7 -0
- relationalai/semantics/sql/executor/snowflake.py +9 -2
- relationalai/semantics/sql/rewrite/denormalize.py +4 -6
- relationalai/semantics/sql/rewrite/recursive_union.py +23 -3
- relationalai/semantics/sql/sql.py +120 -46
- relationalai/semantics/std/__init__.py +9 -4
- relationalai/semantics/std/datetime.py +363 -0
- relationalai/semantics/std/math.py +77 -0
- relationalai/semantics/std/re.py +83 -0
- relationalai/semantics/std/strings.py +1 -1
- relationalai/tools/cli_controls.py +445 -60
- relationalai/util/format.py +78 -1
- {relationalai-0.11.2.dist-info → relationalai-0.11.4.dist-info}/METADATA +3 -2
- {relationalai-0.11.2.dist-info → relationalai-0.11.4.dist-info}/RECORD +41 -39
- relationalai/semantics/std/dates.py +0 -213
- {relationalai-0.11.2.dist-info → relationalai-0.11.4.dist-info}/WHEEL +0 -0
- {relationalai-0.11.2.dist-info → relationalai-0.11.4.dist-info}/entry_points.txt +0 -0
- {relationalai-0.11.2.dist-info → relationalai-0.11.4.dist-info}/licenses/LICENSE +0 -0
```diff
--- a/relationalai/early_access/builder/std/__init__.py
+++ b/relationalai/early_access/builder/std/__init__.py
@@ -1,6 +1,6 @@
 import warnings
 
-from relationalai.semantics.std import range, hash, cast, dates, math, strings, decimals, integers, pragmas, \
+from relationalai.semantics.std import range, hash, cast, datetime as dates, math, strings, decimals, integers, pragmas, \
     constraints, uuid_to_string
 
 __all__ = [
```
```diff
--- a/relationalai/early_access/dsl/bindings/csv.py
+++ b/relationalai/early_access/dsl/bindings/csv.py
@@ -1,5 +1,5 @@
 from io import StringIO
-from typing import Optional
+from typing import Optional, Hashable
 
 import numpy as np
 import pandas as pd
```
```diff
@@ -76,7 +76,7 @@ class BindableCsvColumn(BindableColumn, b.Relationship):
 
 
 class CsvTable(AbstractBindableTable[BindableCsvColumn]):
-    _basic_type_schema: dict[
+    _basic_type_schema: dict[Hashable, str]
     _csv_data: list[pd.DataFrame]
     _num_rows: int
 
```
```diff
@@ -157,13 +157,13 @@ class CsvSourceModule:
     def _row_to_date_value_rule(column, row, value):
         parse_date = b.Relationship.builtins['parse_date']
         rez = b.Date.ref()
-        where(parse_date(value, '
+        where(parse_date(value, 'yyyy-mm-dd', rez)).define(column(row, rez))
 
     @staticmethod
     def _row_to_date_time_value_rule(column, row, value):
         parse_datetime = b.Relationship.builtins['parse_datetime']
         rez = b.DateTime.ref()
-        where(parse_datetime(value, '
+        where(parse_datetime(value, 'yyy-mm-dd HH:MM:SS z', rez)).define(column(row, rez))
 
     @staticmethod
     def _row_to_decimal_value_rule(column, row, value):
```
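These rules hand the raw CSV strings to the backend's `parse_date`/`parse_datetime` builtins together with explicit format patterns. As a rough, purely illustrative Python equivalent of the ISO-style `'yyyy-mm-dd'` pattern (the strptime format below is my own translation, not the library's code path):

```python
from datetime import date, datetime

# Illustrative only: what a 'yyyy-mm-dd' pattern accepts, expressed with Python's strptime.
parsed = datetime.strptime("2024-01-31", "%Y-%m-%d").date()
assert parsed == date(2024, 1, 31)
```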
```diff
--- a/relationalai/semantics/internal/internal.py
+++ b/relationalai/semantics/internal/internal.py
@@ -514,11 +514,13 @@ class Producer:
     #--------------------------------------------------
 
     def in_(self, values:list[Any]|Fragment) -> Expression:
+        columns = None
         if isinstance(values, Fragment):
             return self == values
         if not isinstance(values[0], tuple):
             values = [tuple([v]) for v in values]
-
+            columns = [f"v{i}" for i in range(len(values[0]))]
+        d = data(values, columns)
         return self == d[0]
 
     #--------------------------------------------------
```
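A minimal sketch of the normalization `in_` now performs, written as a hypothetical standalone helper rather than the `Producer` API: scalar membership values are wrapped into 1-tuples and given positional column names before being turned into an inline data relation, while tuple values pass through untouched.

```python
from typing import Any, Optional

def normalize_in_values(values: list[Any]) -> tuple[list[tuple], Optional[list[str]]]:
    # Hypothetical helper mirroring the logic in the hunk above; not part of the library.
    columns = None
    if not isinstance(values[0], tuple):
        values = [(v,) for v in values]
        columns = [f"v{i}" for i in range(len(values[0]))]
    return values, columns

assert normalize_in_values([1, 2, 3]) == ([(1,), (2,), (3,)], ["v0"])
assert normalize_in_values([(1, "a"), (2, "b")]) == ([(1, "a"), (2, "b")], None)
```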
```diff
@@ -907,9 +909,9 @@ class Concept(Producer):
             if python_types_to_concepts.get(v):
                 v = python_types_to_concepts[v]
             if isinstance(v, Concept):
-                setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._name}}}", short_name=k, model=self._model))
+                setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._name}}}", parent=self, short_name=k, model=self._model))
             elif isinstance(v, type) and issubclass(v, self._model.Enum): #type: ignore
-                setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._concept._name}}}", short_name=k, model=self._model))
+                setattr(self, k, Property(f"{{{self._name}}} has {{{k}:{v._concept._name}}}", parent=self, short_name=k, model=self._model))
             elif isinstance(v, Relationship):
                 self._validate_identifier_relationship(v)
                 setattr(self, k, v)
```
```diff
@@ -1189,6 +1191,7 @@ def is_decimal(concept: Concept) -> bool:
 Concept.builtins["Int"] = Concept.builtins["Int128"]
 Concept.builtins["Integer"] = Concept.builtins["Int128"]
 
+_np_datetime = np.dtype('datetime64[ns]')
 python_types_to_concepts : dict[Any, Concept] = {
     int: Concept.builtins["Int128"],
     float: Concept.builtins["Float"],
```
```diff
@@ -1213,6 +1216,7 @@ python_types_to_concepts : dict[Any, Concept] = {
     np.dtype('float32'): Concept.builtins["Float"],
     np.dtype('bool'): Concept.builtins["Bool"],
     np.dtype('object'): Concept.builtins["String"], # Often strings are stored as object dtype
+    _np_datetime: Concept.builtins["DateTime"],
 
     # Pandas extension dtypes
     pd.Int64Dtype(): Concept.builtins["Int128"],
```
```diff
@@ -1655,7 +1659,9 @@ class Expression(Producer):
             raise ValueError(f"Argument index should be positive, got {idx}")
         if len(self._params) <= idx:
             raise ValueError(f"Expression '{self.__str__()}' has only {len(self._params)} arguments")
-
+        param = self._params[idx]
+        # if param is an Expression then refer the last param of this expression
+        return ArgumentRef(self, param._params[-1] if isinstance(param, Expression) else param)
 
     def __getattr__(self, name: str):
         last = self._params[-1]
```
```diff
@@ -2090,8 +2096,20 @@ class DataColumn(Producer):
         self._data = data
         self._type = _type
         self._name = name if isinstance(name, str) else f"v{name}"
+        if pd.api.types.is_datetime64_any_dtype(_type):
+            _type = _np_datetime
+        # dates are objects in pandas
+        elif pd.api.types.is_object_dtype(_type) and self._is_date_column():
+            _type = date
         self._ref = python_types_to_concepts[_type].ref(self._name)
 
+    def _is_date_column(self) -> bool:
+        sample = self._data._data[self._name].dropna()
+        if sample.empty:
+            return False
+        sample_value = sample.iloc[0]
+        return isinstance(sample_value, date) and not isinstance(sample_value, datetime)
+
     def __str__(self):
         return f"DataColumn({self._name}, {self._type})"
 
```
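The constructor now distinguishes two pandas cases before mapping a column to a concept: native `datetime64` columns, and object-dtype columns that actually hold `datetime.date` values. A small self-contained illustration of those dtype checks (the sample data is made up):

```python
from datetime import date
import pandas as pd

df = pd.DataFrame({
    "ts": pd.to_datetime(["2024-01-01", "2024-01-02"]),  # datetime64[ns] column
    "d": [date(2024, 1, 1), date(2024, 1, 2)],           # object column holding date objects
})

assert pd.api.types.is_datetime64_any_dtype(df["ts"].dtype)
assert pd.api.types.is_object_dtype(df["d"].dtype)
assert isinstance(df["d"].dropna().iloc[0], date)
```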
```diff
--- a/relationalai/semantics/lqp/executor.py
+++ b/relationalai/semantics/lqp/executor.py
@@ -12,6 +12,7 @@ from relationalai import debugging
 from relationalai.semantics.lqp import result_helpers
 from relationalai.semantics.metamodel import ir, factory as f, executor as e
 from relationalai.semantics.lqp.compiler import Compiler
+from relationalai.semantics.lqp.intrinsics import mk_intrinsic_datetime_now
 from relationalai.semantics.lqp.types import lqp_type_to_sql
 from lqp import print as lqp_print, ir as lqp_ir
 from lqp.parser import construct_configure
```
```diff
@@ -120,13 +121,15 @@ class LQPExecutor(e.Executor):
                 pyrel_errors[problem["props"]["pyrel_id"]].append(problem)
             elif abort_on_error:
                 e = errors.RelQueryError(problem, source)
-                supplementary_message = "Troubleshooting:\n" + \
-                    " 1. Please retry with a new name for your model. This can work around state-related issues.\n" + \
-                    " 2. If the error persists, please retry with the `use_lqp` flag set to `False`, for example:\n" + \
-                    " `model = Model(..., use_lqp=False)`\n" + \
-                    " This will switch the execution to the legacy backend, which may avoid the issue with some performance cost.\n"
 
-
+                if code == 'SYSTEM_INTERNAL':
+                    supplementary_message = "Troubleshooting:\n" + \
+                        " 1. Please retry with a new name for your model. This can work around state-related issues.\n" + \
+                        " 2. If the error persists, please retry with the `use_lqp` flag set to `False`, for example:\n" + \
+                        " `model = Model(..., use_lqp=False)`\n" + \
+                        " This will switch the execution to the legacy backend, which may avoid the issue with some performance cost.\n"
+
+                    e.content = f"{e.content}{supplementary_message}"
                 all_errors.append(e)
             else:
                 if code == "ARITY_MISMATCH":
```
```diff
@@ -256,11 +259,47 @@ class LQPExecutor(e.Executor):
 
         return ", ".join(fields)
 
+    def _construct_configure(self):
+        config_dict = {}
+        # Only set the IVM flag if there is a value in `config`. Otherwise, let
+        # `construct_configure` set the default value.
+        ivm_flag = self.config.get('reasoner.rule.incremental_maintenance', None)
+        if ivm_flag:
+            config_dict['ivm.maintenance_level'] = lqp_ir.Value(value=ivm_flag, meta=None)
+        return construct_configure(config_dict, None)
+
+    def _compile_intrinsics(self) -> lqp_ir.Epoch:
+        """Construct an epoch that defines a number of built-in definitions used by the
+        emitter."""
+        with debugging.span("compile_intrinsics") as span:
+            debug_info = lqp_ir.DebugInfo(id_to_orig_name={}, meta=None)
+            intrinsics_fragment = lqp_ir.Fragment(
+                id = lqp_ir.FragmentId(id=b"__pyrel_lqp_intrinsics", meta=None),
+                declarations = [
+                    mk_intrinsic_datetime_now(),
+                ],
+                debug_info = debug_info,
+                meta = None,
+            )
+
+            span["compile_type"] = "intrinsics"
+            span["lqp"] = lqp_print.to_string(intrinsics_fragment, {"print_names": True, "print_debug": False, "print_csv_filename": False})
+
+            return lqp_ir.Epoch(
+                writes=[
+                    lqp_ir.Write(write_type=lqp_ir.Define(fragment=intrinsics_fragment, meta=None), meta=None)
+                ],
+                meta=None,
+            )
+
     def compile_lqp(self, model: ir.Model, task: ir.Task):
+        configure = self._construct_configure()
+
         model_txn = None
         if self._last_model != model:
             with debugging.span("compile", metamodel=model) as install_span:
                 _, model_txn = self.compiler.compile(model, {"fragment_id": b"model"})
+                model_txn = txn_with_configure(model_txn, configure)
                 install_span["compile_type"] = "model"
                 install_span["lqp"] = lqp_print.to_string(model_txn, {"print_names": True, "print_debug": False, "print_csv_filename": False})
                 self._last_model = model
```
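A small, runnable sketch of the guard in `_construct_configure` (a plain dict stands in for the executor's config object, so the helper name and types are illustrative): the IVM maintenance level is forwarded only when the user actually configured it, and `construct_configure` keeps its default otherwise.

```python
def build_config_overrides(user_config: dict) -> dict:
    # Stand-in for the real config lookup; not the library's API.
    overrides = {}
    ivm_flag = user_config.get("reasoner.rule.incremental_maintenance")
    if ivm_flag:
        overrides["ivm.maintenance_level"] = ivm_flag
    return overrides

assert build_config_overrides({}) == {}
assert build_config_overrides(
    {"reasoner.rule.incremental_maintenance": "off"}
) == {"ivm.maintenance_level": "off"}
```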
```diff
@@ -273,23 +312,26 @@ class LQPExecutor(e.Executor):
             }
             result, final_model = self.compiler.compile_inner(query, options)
             export_info, query_txn = result
+            query_txn = txn_with_configure(query_txn, configure)
             compile_span["compile_type"] = "query"
             compile_span["lqp"] = lqp_print.to_string(query_txn, {"print_names": True, "print_debug": False, "print_csv_filename": False})
 
-
+        # Merge the epochs into a single transactions. Long term the query bits should all
+        # go into a WhatIf action and the intrinsics could be fused with either of them. But
+        # for now we just use separate epochs.
+        epochs = []
+
+        epochs.append(self._compile_intrinsics())
+
         if model_txn is not None:
-
-            # go into a WhatIf action. But for now we just use two separate epochs.
-            model_epoch = model_txn.epochs[0]
-            query_epoch = query_txn.epochs[0]
-            txn = lqp_ir.Transaction(
-                epochs=[model_epoch, query_epoch],
-                configure=construct_configure({}, None),
-                meta=None,
-            )
+            epochs.append(model_txn.epochs[0])
 
-
-
+        epochs.append(query_txn.epochs[0])
+
+        txn = lqp_ir.Transaction(epochs=epochs, configure=configure, meta=None)
+
+        # Revalidate now that we've joined all the epochs.
+        validate_lqp(txn)
 
         txn_proto = convert_transaction(txn)
         # TODO (azreika): Should export_info be encoded as part of the txn_proto? [RAI-40312]
```
```diff
@@ -350,3 +392,12 @@
             # If processing the results failed, revert to the previous model.
             self._last_model = previous_model
             raise e
+
+def txn_with_configure(txn: lqp_ir.Transaction, configure: lqp_ir.Configure) -> lqp_ir.Transaction:
+    """ Return a new transaction with the given configure. If the transaction already has
+    a configure, it is replaced. """
+    return lqp_ir.Transaction(
+        epochs=txn.epochs,
+        configure=configure,
+        meta=txn.meta,
+    )
```
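`txn_with_configure` rebuilds the transaction rather than mutating it, the usual pattern for immutable IR nodes. A hedged sketch of the same idea with stand-in dataclasses (these are not the `lqp_ir` types):

```python
from dataclasses import dataclass, replace

@dataclass(frozen=True)
class Configure:
    settings: tuple

@dataclass(frozen=True)
class Transaction:
    epochs: tuple
    configure: Configure

def with_configure(txn: Transaction, configure: Configure) -> Transaction:
    # Copy every other field, swap in the new configure.
    return replace(txn, configure=configure)

txn = Transaction(epochs=("model", "query"), configure=Configure(settings=()))
new_txn = with_configure(txn, Configure(settings=(("ivm.maintenance_level", "off"),)))
assert new_txn.epochs == txn.epochs and new_txn is not txn
```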
```diff
--- /dev/null
+++ b/relationalai/semantics/lqp/intrinsics.py
@@ -0,0 +1,23 @@
+from datetime import datetime, timezone
+
+from relationalai.semantics.lqp import ir as lqp
+from relationalai.semantics.lqp.constructors import mk_abstraction, mk_value, mk_var, mk_type, mk_primitive
+from relationalai.semantics.lqp.utils import lqp_hash
+
+def mk_intrinsic_datetime_now() -> lqp.Def:
+    """Constructs a definition of the current datetime."""
+    id = lqp_hash("__pyrel_lqp_intrinsic_datetime_now")
+    out = mk_var("out")
+    out_type = mk_type(lqp.TypeName.DATETIME)
+    now = mk_value(lqp.DateTimeValue(value=datetime.now(timezone.utc), meta=None))
+    datetime_now = mk_abstraction(
+        [(out, out_type)],
+        mk_primitive("rel_primitive_eq", [out, now]),
+    )
+
+    return lqp.Def(
+        name = lqp.RelationId(id=id, meta=None),
+        body = datetime_now,
+        attrs = [],
+        meta = None,
+    )
```
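One property worth noting about this definition: `datetime.now(timezone.utc)` runs in Python while the intrinsic `Def` is being constructed, so the relation holds a timestamp captured at emit time. A trivial runnable illustration of that capture semantics (not library code):

```python
from datetime import datetime, timezone
import time

captured = datetime.now(timezone.utc)  # analogous to the value baked into the Def
time.sleep(0.01)
assert captured < datetime.now(timezone.utc)  # later reads move on; the captured value does not
```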
```diff
--- a/relationalai/semantics/lqp/model2lqp.py
+++ b/relationalai/semantics/lqp/model2lqp.py
@@ -13,7 +13,7 @@ from relationalai.semantics.lqp.utils import TranslationCtx, gen_unique_var
 from relationalai.semantics.lqp.validators import assert_valid_input
 
 from decimal import Decimal as PyDecimal
-from datetime import datetime, date
+from datetime import datetime, date, timezone
 from typing import Tuple, cast, Union, Optional
 from warnings import warn
 import re
```
```diff
@@ -192,12 +192,21 @@ def _translate_effect(ctx: TranslationCtx, effect: Union[ir.Output, ir.Update],
     elif isinstance(effect, ir.Output):
         ctx.output_ids.append((rel_id, def_name))
 
+    # First we collect annotations on the effect itself, e.g. from something like
+    # `select(...).annotate(...)`.
+    annotations = effect.annotations
+    if isinstance(effect, ir.Update):
+        # Then we translate annotations on the relation itself, e.g.
+        # ```
+        # Bar.foo = model.Relationship(...)
+        # Bar.foo.annotate(...)
+        # ```
+        annotations = annotations | effect.relation.annotations
+
     return lqp.Def(
         name = rel_id,
         body = mk_abstraction(projection, new_body),
-
-        # relation are not included yet.
-        attrs = _translate_annotations(effect.annotations),
+        attrs = _translate_annotations(annotations),
         meta = None,
     )
 
```
```diff
@@ -627,7 +636,8 @@ def to_lqp_value(value: ir.PyValue, value_type: ir.Type) -> lqp.Value:
     elif typ.type_name == lqp.TypeName.DATE and isinstance(value, date):
         val = lqp.DateValue(value=value, meta=None)
     elif typ.type_name == lqp.TypeName.DATETIME and isinstance(value, datetime):
-
+        utc_value = value.astimezone(timezone.utc) if value.tzinfo is not None else value # Convert to UTC cf. Iceberg
+        val = lqp.DateTimeValue(value=utc_value, meta=None)
     elif typ.type_name == lqp.TypeName.BOOLEAN and isinstance(value, bool):
         val = lqp.BooleanValue(value=value, meta=None)
     else:
```
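The aware-vs-naive behaviour of that conversion is easy to check in plain Python; the helper below only illustrates the expression used above and is not library code.

```python
from datetime import datetime, timezone, timedelta

def to_utc(value: datetime) -> datetime:
    # Aware datetimes are normalized to UTC; naive ones pass through unchanged.
    return value.astimezone(timezone.utc) if value.tzinfo is not None else value

aware = datetime(2024, 1, 1, 12, 0, tzinfo=timezone(timedelta(hours=2)))
naive = datetime(2024, 1, 1, 12, 0)

assert to_utc(aware) == datetime(2024, 1, 1, 10, 0, tzinfo=timezone.utc)
assert to_utc(naive) is naive
```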
```diff
@@ -696,4 +706,4 @@ def _translate_join(ctx: TranslationCtx, task: ir.Lookup) -> lqp.Formula:
 
     output_term = _translate_term(ctx, target)[0]
 
-    return lqp.Reduce(meta=None, op=op, body=body, terms=[output_term])
+    return lqp.Reduce(meta=None, op=op, body=body, terms=[output_term])
```
```diff
--- a/relationalai/semantics/lqp/passes.py
+++ b/relationalai/semantics/lqp/passes.py
@@ -8,7 +8,7 @@ from relationalai.semantics.metamodel.util import FrozenOrderedSet
 
 from relationalai.semantics.metamodel.rewrite import Flatten
 # TODO: Move this into metamodel.rewrite
-from relationalai.semantics.rel.rewrite import QuantifyVars, CDC
+from relationalai.semantics.rel.rewrite import QuantifyVars, CDC, ExtractCommon
 
 from relationalai.semantics.lqp.utils import output_names
 
```
```diff
@@ -25,8 +25,7 @@ def lqp_passes() -> list[Pass]:
         InferTypes(),
         DNFUnionSplitter(),
         ExtractKeys(),
-
-        # ExtractCommon(),
+        ExtractCommon(),
         Flatten(),
         Splinter(), # Splits multi-headed rules into multiple rules
         QuantifyVars(), # Adds missing existentials
```
```diff
@@ -548,7 +547,7 @@ class PeriodMath(Pass):
             return node
 
         if node.relation.name not in {
-            "year", "month", "week", "day", "hour", "minute", "second", "millisecond"
+            "year", "month", "week", "day", "hour", "minute", "second", "millisecond", "microsecond", "nanosecond"
         }:
             return node
 
```
```diff
--- a/relationalai/semantics/lqp/primitives.py
+++ b/relationalai/semantics/lqp/primitives.py
@@ -1,7 +1,7 @@
 from relationalai.semantics.metamodel.types import digits_to_bits
 from relationalai.semantics.lqp import ir as lqp
 from relationalai.semantics.lqp.types import is_numeric
-from relationalai.semantics.lqp.utils import UniqueNames
+from relationalai.semantics.lqp.utils import UniqueNames, lqp_hash
 from relationalai.semantics.lqp.constructors import mk_primitive, mk_specialized_value, mk_type, mk_value, mk_var
 
 rel_to_lqp = {
```
```diff
@@ -22,12 +22,18 @@ rel_to_lqp = {
     "sinh": "rel_primitive_sinh",
     "asin": "rel_primitive_asin",
     "asinh": "rel_primitive_asinh",
+    "tan": "rel_primitive_tan",
+    "tanh": "rel_primitive_tanh",
+    "atan": "rel_primitive_atan",
+    "atanh": "rel_primitive_atanh",
+    "cot": "rel_primitive_cot",
+    "acot": "rel_primitive_acot",
     "construct_date": "rel_primitive_construct_date",
     "construct_date_from_datetime": "rel_primitive_datetime_date_convert",
-    "construct_datetime": "rel_primitive_construct_datetime",
     "construct_datetime_ms_tz": "rel_primitive_construct_datetime",
     "hash": "rel_primitive_hash_tuple_uint128",
     "uuid_to_string": "rel_primitive_uuid_string",
+    "parse_uuid": "rel_primitive_parse_uuid",
     "parse_date": "rel_primitive_parse_date",
     "parse_datetime": "rel_primitive_parse_datetime",
     "parse_decimal": "rel_primitive_parse_decimal",
```
```diff
@@ -47,23 +53,42 @@
     "replace": "rel_primitive_replace",
     "strip": "rel_primitive_trim",
     "date_year": "rel_primitive_date_year",
+    "date_quarter": "rel_primitive_date_quarterofyear",
     "date_month": "rel_primitive_date_month",
     "date_week": "rel_primitive_date_week",
     "date_day": "rel_primitive_date_day",
+    "date_dayofyear": "rel_primitive_date_dayofyear",
+    "date_weekday": "rel_primitive_date_dayofweek",
     "date_add": "rel_primitive_typed_add_date_period",
     "date_subtract": "rel_primitive_typed_subtract_date_period",
     "dates_period_days": "rel_primitive_date_days_between",
+    "datetime_now": "__pyrel_lqp_intrinsic_datetime_now",
     "datetime_add": "rel_primitive_typed_add_datetime_period",
     "datetime_subtract": "rel_primitive_typed_subtract_datetime_period",
+    "datetime_year": "rel_primitive_datetime_year",
+    "datetime_quarter": "rel_primitive_datetime_quarterofyear",
+    "datetime_month": "rel_primitive_datetime_month",
     "datetime_week": "rel_primitive_datetime_week",
+    "datetime_day": "rel_primitive_datetime_day",
+    "datetime_dayofyear": "rel_primitive_datetime_dayofyear",
+    "datetime_hour": "rel_primitive_datetime_hour",
+    "datetime_minute": "rel_primitive_datetime_minute",
+    "datetime_second": "rel_primitive_datetime_second",
+    "datetime_weekday": "rel_primitive_datetime_dayofweek",
     "datetimes_period_milliseconds": "rel_primitive_datetime_milliseconds_between",
     "date_format": "rel_primitive_format_date",
     "datetime_format": "rel_primitive_format_datetime",
     "range": "rel_primitive_range",
     "natural_log": "rel_primitive_natural_log",
+    "log": "rel_primitive_log",
+    "log2": "rel_primitive_log2",
+    "log10": "rel_primitive_log10",
     "sqrt": "rel_primitive_sqrt",
     "isinf": "rel_primitive_isinf",
     "isnan": "rel_primitive_isnan",
+    "exp": "rel_primitive_natural_exp",
+    "erf": "rel_primitive_error_function",
+    "erfinv": "rel_primitive_error_function_inverse",
     # Division is monotype, but only on the input args. Until we distinguish between input
     # and output args, we can't use the same assertions for monotype-ness as the other ops.
     "/": "rel_primitive_divide_monotype",
```
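These entries extend the builder-name to backend-primitive table; note that `datetime_now` maps to the emitter-defined intrinsic rather than a `rel_primitive_*`. A small sketch of how such a table is typically consulted (the fall-through behaviour below is an assumption for illustration, not necessarily what `relname_to_lqp_name` does):

```python
rel_to_lqp_excerpt = {
    "exp": "rel_primitive_natural_exp",
    "log10": "rel_primitive_log10",
    "datetime_hour": "rel_primitive_datetime_hour",
    "datetime_now": "__pyrel_lqp_intrinsic_datetime_now",
}

def to_backend_name(name: str) -> str:
    # Assumed fall-through: unknown names are passed along unchanged.
    return rel_to_lqp_excerpt.get(name, name)

assert to_backend_name("exp") == "rel_primitive_natural_exp"
assert to_backend_name("datetime_now").startswith("__pyrel_lqp_intrinsic")
```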
```diff
@@ -101,18 +126,7 @@ rel_to_lqp_monotype = {
 # Insert extra terms where a raicode primitive expects more terms, and there are possible
 # defaults.
 def _extend_primitive_terms(name: str, terms: list[lqp.RelTerm], term_types: list[lqp.Type]) -> tuple[list[lqp.RelTerm], list[lqp.Type]]:
-    if name == "
-        # construct_datetime does not provide a timezone or milliseconds so we
-        # default to 0 milliseconds and UTC timezone.
-        terms = [*terms[:-1], mk_value(0), mk_value("UTC"), terms[-1]]
-        term_types = [
-            *term_types[:-1],
-            mk_type(lqp.TypeName.INT),
-            mk_type(lqp.TypeName.STRING),
-            term_types[-1],
-        ]
-
-    elif name == "rel_primitive_parse_decimal" and len(terms) == 2:
+    if name == "rel_primitive_parse_decimal" and len(terms) == 2:
         assert term_types
         py_precision = term_types[1].parameters[0].value
         bit_value = mk_value(digits_to_bits(py_precision))
```
```diff
@@ -162,6 +176,16 @@ def build_primitive(
     terms, term_types = _reorder_primitive_terms(lqp_name, terms, term_types)
     _assert_primitive_terms(lqp_name, terms, term_types)
 
+    # Handle intrinsics. To callers of `build_primitive` the distinction between intrinsic
+    # and primitive doesn't matter, so we don't want to burden them with that detail.
+    # Intrinsics are built-in definitions added by the LQP emitter, that user logic can just
+    # refer to.
+    if lqp_name == "__pyrel_lqp_intrinsic_datetime_now":
+        id = lqp.RelationId(id=lqp_hash(lqp_name), meta=None)
+        assert len(terms) == 1
+        assert isinstance(terms[0], lqp.Term)
+        return lqp.Atom(name=id, terms=[terms[0]], meta=None)
+
     return mk_primitive(lqp_name, terms)
 
 def relname_to_lqp_name(name: str) -> str:
```
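The key point in this branch is that the intrinsic reference is built from the same `lqp_hash` string as the intrinsic's definition in `intrinsics.py`, so the emitted atom and the `Def` agree on the relation id. A hedged stand-in sketch of that dispatch (the classes and hash below are illustrations, not the lqp IR):

```python
from dataclasses import dataclass
from hashlib import blake2b

@dataclass
class Atom:          # stand-in for lqp.Atom
    relation_id: int
    terms: list

@dataclass
class Primitive:     # stand-in for mk_primitive's result
    name: str
    terms: list

def stable_id(name: str) -> int:
    # Illustrative stable hash; the real code uses lqp_hash.
    return int.from_bytes(blake2b(name.encode(), digest_size=8).digest(), "big")

def build(name: str, terms: list):
    if name == "__pyrel_lqp_intrinsic_datetime_now":
        assert len(terms) == 1
        return Atom(stable_id(name), terms)   # reference the intrinsic's relation
    return Primitive(name, terms)             # everything else stays a primitive call

assert isinstance(build("rel_primitive_sqrt", ["x", "y"]), Primitive)
assert isinstance(build("__pyrel_lqp_intrinsic_datetime_now", ["t"]), Atom)
```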
|