relationalai 0.12.8__py3-none-any.whl → 0.12.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- relationalai/__init__.py +9 -0
- relationalai/clients/__init__.py +2 -2
- relationalai/clients/local.py +571 -0
- relationalai/clients/snowflake.py +106 -83
- relationalai/debugging.py +5 -2
- relationalai/semantics/__init__.py +2 -2
- relationalai/semantics/internal/__init__.py +2 -2
- relationalai/semantics/internal/internal.py +53 -14
- relationalai/semantics/lqp/README.md +34 -0
- relationalai/semantics/lqp/compiler.py +1 -1
- relationalai/semantics/lqp/constructors.py +7 -0
- relationalai/semantics/lqp/executor.py +35 -39
- relationalai/semantics/lqp/intrinsics.py +4 -3
- relationalai/semantics/lqp/ir.py +4 -0
- relationalai/semantics/lqp/model2lqp.py +47 -14
- relationalai/semantics/lqp/passes.py +7 -4
- relationalai/semantics/lqp/rewrite/__init__.py +4 -1
- relationalai/semantics/lqp/rewrite/annotate_constraints.py +55 -0
- relationalai/semantics/lqp/rewrite/extract_keys.py +22 -3
- relationalai/semantics/lqp/rewrite/function_annotations.py +91 -56
- relationalai/semantics/lqp/rewrite/functional_dependencies.py +314 -0
- relationalai/semantics/lqp/rewrite/quantify_vars.py +14 -0
- relationalai/semantics/lqp/validators.py +3 -0
- relationalai/semantics/metamodel/builtins.py +10 -0
- relationalai/semantics/metamodel/rewrite/extract_nested_logicals.py +5 -4
- relationalai/semantics/metamodel/rewrite/flatten.py +10 -4
- relationalai/semantics/metamodel/typer/typer.py +13 -0
- relationalai/semantics/metamodel/types.py +2 -1
- relationalai/semantics/reasoners/graph/core.py +44 -53
- relationalai/semantics/rel/compiler.py +19 -1
- relationalai/semantics/tests/test_snapshot_abstract.py +3 -0
- relationalai/tools/debugger.py +4 -2
- relationalai/tools/qb_debugger.py +5 -3
- relationalai/util/otel_handler.py +10 -4
- {relationalai-0.12.8.dist-info → relationalai-0.12.10.dist-info}/METADATA +2 -2
- {relationalai-0.12.8.dist-info → relationalai-0.12.10.dist-info}/RECORD +39 -35
- {relationalai-0.12.8.dist-info → relationalai-0.12.10.dist-info}/WHEEL +0 -0
- {relationalai-0.12.8.dist-info → relationalai-0.12.10.dist-info}/entry_points.txt +0 -0
- {relationalai-0.12.8.dist-info → relationalai-0.12.10.dist-info}/licenses/LICENSE +0 -0
@@ -441,7 +441,8 @@ class Resources(ResourcesBase):
         code: str,
         params: List[Any] | Any | None = None,
         raw: bool = False,
-        help: bool = True
+        help: bool = True,
+        skip_auto_create: bool = False
     ) -> Any:
         # print(f"\n--- sql---\n{code}\n--- end sql---\n")
         if not self._session:
@@ -458,7 +459,6 @@ class Resources(ResourcesBase):
         rai_app = self.config.get("rai_app_name", "")
         current_role = self.config.get("role")
         engine = self.get_default_engine_name()
-        engine_size = self.config.get_default_engine_size()
         assert isinstance(rai_app, str), f"rai_app_name must be a string, not {type(rai_app)}"
         assert isinstance(engine, str), f"engine must be a string, not {type(engine)}"
         print("\n")
@@ -467,15 +467,10 @@ class Resources(ResourcesBase):
             if re.search(f"database '{rai_app}' does not exist or not authorized.".lower(), orig_message):
                 exception = SnowflakeAppMissingException(rai_app, current_role)
                 raise exception from None
-            if
+            # skip creating the engine if the query is a user transaction. exec_async_v2 will handle that case.
+            if _is_engine_issue(orig_message) and not skip_auto_create:
                 try:
-                    self.
-                        app_name=self.get_app_name(),
-                        sources=self.sources,
-                        model=self.database,
-                        engine_name=engine,
-                        engine_size=engine_size
-                    )
+                    self.auto_create_engine(engine)
                     return self._exec(code, params, raw=raw, help=help)
                 except EngineNameValidationException as e:
                     raise EngineNameValidationException(engine) from e
@@ -1612,6 +1607,7 @@ Otherwise, remove it from your '{profile}' configuration profile.
         response = self._exec(
             sql_string,
             raw_code,
+            skip_auto_create=True,
         )
         if not response:
             raise Exception("Failed to create transaction")
@@ -1629,6 +1625,7 @@ Otherwise, remove it from your '{profile}' configuration profile.
         bypass_index=False,
         language: str = "rel",
         query_timeout_mins: int | None = None,
+        gi_setup_skipped: bool = False,
     ):
         if inputs is None:
             inputs = {}
@@ -1638,6 +1635,8 @@ Otherwise, remove it from your '{profile}' configuration profile.
         with debugging.span("transaction", **query_attrs_dict) as txn_span:
             with debugging.span("create_v2", **query_attrs_dict) as create_span:
                 request_headers['user-agent'] = get_pyrel_version(self.generation)
+                request_headers['gi_setup_skipped'] = str(gi_setup_skipped)
+                request_headers['pyrel_program_id'] = debugging.get_program_span_id() or ""
                 response = self._exec_rai_app(
                     database=database,
                     engine=engine,
@@ -1897,26 +1896,29 @@ Otherwise, remove it from your '{profile}' configuration profile.
    # Exec
    #--------------------------------------------------

-    def
+    def _exec_with_gi_retry(
        self,
        database: str,
        engine: str | None,
-        raw_code:
-
-
-
-
-
-
-        query_timeout_mins: int | None
+        raw_code: str,
+        inputs: Dict | None,
+        readonly: bool,
+        nowait_durable: bool,
+        headers: Dict | None,
+        bypass_index: bool,
+        language: str,
+        query_timeout_mins: int | None,
    ):
-
+        """Execute with graph index retry logic.

+        Attempts execution with gi_setup_skipped=True first. If an engine or database
+        issue occurs, polls use_index and retries with gi_setup_skipped=False.
+        """
        try:
            return self._exec_async_v2(
-                database, engine,
-                headers=headers, bypass_index=bypass_index, language=
-                query_timeout_mins=query_timeout_mins,
+                database, engine, raw_code, inputs, readonly, nowait_durable,
+                headers=headers, bypass_index=bypass_index, language=language,
+                query_timeout_mins=query_timeout_mins, gi_setup_skipped=True,
            )
        except Exception as e:
            err_message = str(e).lower()
@@ -1933,13 +1935,32 @@ Otherwise, remove it from your '{profile}' configuration profile.
                )

                return self._exec_async_v2(
-                    database, engine,
-                    headers=headers, bypass_index=bypass_index, language=
-                    query_timeout_mins=query_timeout_mins,
+                    database, engine, raw_code, inputs, readonly, nowait_durable,
+                    headers=headers, bypass_index=bypass_index, language=language,
+                    query_timeout_mins=query_timeout_mins, gi_setup_skipped=False,
                )
            else:
                raise e

+    def exec_lqp(
+        self,
+        database: str,
+        engine: str | None,
+        raw_code: bytes,
+        readonly=True,
+        *,
+        inputs: Dict | None = None,
+        nowait_durable=False,
+        headers: Dict | None = None,
+        bypass_index=False,
+        query_timeout_mins: int | None = None,
+    ):
+        raw_code_b64 = base64.b64encode(raw_code).decode("utf-8")
+        return self._exec_with_gi_retry(
+            database, engine, raw_code_b64, inputs, readonly, nowait_durable,
+            headers, bypass_index, 'lqp', query_timeout_mins
+        )
+

    def exec_raw(
        self,
@@ -1955,45 +1976,10 @@ Otherwise, remove it from your '{profile}' configuration profile.
        query_timeout_mins: int | None = None,
    ):
        raw_code = raw_code.replace("'", "\\'")
-
-
-
-
-                engine,
-                raw_code,
-                inputs,
-                readonly,
-                nowait_durable,
-                headers=headers,
-                bypass_index=bypass_index,
-                query_timeout_mins=query_timeout_mins,
-            )
-        except Exception as e:
-            err_message = str(e).lower()
-            if _is_engine_issue(err_message) or _is_database_issue(err_message):
-                engine_name = engine or self.get_default_engine_name()
-                engine_size = self.config.get_default_engine_size()
-                self._poll_use_index(
-                    app_name=self.get_app_name(),
-                    sources=self.sources,
-                    model=database,
-                    engine_name=engine_name,
-                    engine_size=engine_size,
-                    headers=headers,
-                )
-                return self._exec_async_v2(
-                    database,
-                    engine,
-                    raw_code,
-                    inputs,
-                    readonly,
-                    nowait_durable,
-                    headers=headers,
-                    bypass_index=bypass_index,
-                    query_timeout_mins=query_timeout_mins,
-                )
-            else:
-                raise e
+        return self._exec_with_gi_retry(
+            database, engine, raw_code, inputs, readonly, nowait_durable,
+            headers, bypass_index, 'rel', query_timeout_mins
+        )


    def format_results(self, results, task:m.Task|None=None) -> Tuple[DataFrame, List[Any]]:
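The docstring in the hunks above describes the shape shared by `_exec_with_gi_retry` and its callers (`exec_raw` and the new `exec_lqp`): attempt the transaction with `gi_setup_skipped=True`, and only fall back to polling `use_index` and retrying with `gi_setup_skipped=False` when the failure looks like a missing engine or database. A simplified, standalone sketch of that control flow; the helper names below are placeholders, not the package's API:

```python
# Simplified sketch of the graph-index retry pattern; all three callables
# here are placeholders standing in for the package's internal helpers.
def run_with_gi_retry(execute, is_engine_or_db_issue, poll_use_index):
    try:
        # Fast path: assume the graph index is already set up.
        return execute(gi_setup_skipped=True)
    except Exception as e:
        if is_engine_or_db_issue(str(e).lower()):
            # Bring the engine / graph index up, then retry once with setup enabled.
            poll_use_index()
            return execute(gi_setup_skipped=False)
        raise
```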
@@ -3314,19 +3300,10 @@ class DirectAccessResources(Resources):
                    message = "" # Not used when we check status_code directly

                # fix engine on engine error and retry
-                # Skip auto-retry if skip_auto_create is True to avoid recursion
-                if
-
-
-                    engine_size = self.config.get_default_engine_size()
-                    self._poll_use_index(
-                        app_name=self.get_app_name(),
-                        sources=self.sources,
-                        model=self.database,
-                        engine_name=engine_name,
-                        engine_size=engine_size,
-                        headers=headers,
-                    )
+                # Skip auto-retry if skip_auto_create is True to avoid recursion or to let _exec_async_v2 poll the index.
+                if _is_engine_issue(message) and not skip_auto_create:
+                    engine = payload.get("engine_name", "") if payload else ""
+                    self.auto_create_engine(engine)
                response = _send_request()
            except requests.exceptions.ConnectionError as e:
                if "NameResolutionError" in str(e):
@@ -3340,6 +3317,48 @@ class DirectAccessResources(Resources):
                raise e
        return response

+    def _txn_request_with_gi_retry(
+        self,
+        payload: Dict,
+        headers: Dict[str, str],
+        query_params: Dict,
+        engine: Union[str, None],
+    ):
+        """Make request with graph index retry logic.
+
+        Attempts request with gi_setup_skipped=True first. If an engine or database
+        issue occurs, polls use_index and retries with gi_setup_skipped=False.
+        """
+        response = self.request(
+            "create_txn", payload=payload, headers=headers, query_params=query_params, skip_auto_create=True
+        )
+
+        if response.status_code != 200:
+            try:
+                message = response.json().get("message", "")
+            except requests.exceptions.JSONDecodeError:
+                message = ""
+
+            if _is_engine_issue(message) or _is_database_issue(message):
+                engine_name = engine or self.get_default_engine_name()
+                engine_size = self.config.get_default_engine_size()
+                self._poll_use_index(
+                    app_name=self.get_app_name(),
+                    sources=self.sources,
+                    model=self.database,
+                    engine_name=engine_name,
+                    engine_size=engine_size,
+                    headers=headers,
+                )
+                headers['gi_setup_skipped'] = 'False'
+                response = self.request(
+                    "create_txn", payload=payload, headers=headers, query_params=query_params, skip_auto_create=True
+                )
+            else:
+                raise ResponseStatusException("Failed to create transaction.", response)
+
+        return response
+
    def _exec_async_v2(
        self,
        database: str,
@@ -3352,6 +3371,7 @@ class DirectAccessResources(Resources):
        bypass_index=False,
        language: str = "rel",
        query_timeout_mins: int | None = None,
+        gi_setup_skipped: bool = False,
    ):

        with debugging.span("transaction") as txn_span:
@@ -3374,12 +3394,15 @@ class DirectAccessResources(Resources):
                payload["timeout_mins"] = query_timeout_mins
            query_params={"use_graph_index": str(use_graph_index and not bypass_index)}

-
-
-
+            # Add gi_setup_skipped to headers
+            if headers is None:
+                headers = {}
+            headers["gi_setup_skipped"] = str(gi_setup_skipped)
+            headers['pyrel_program_id'] = debugging.get_program_span_id() or ""

-
-
+            response = self._txn_request_with_gi_retry(
+                payload, headers, query_params, engine
+            )

            artifact_info = {}
            response_content = response.json()
relationalai/debugging.py CHANGED

@@ -26,6 +26,9 @@ find_block_in = find_block_in # re-export
DEBUG = True
handled_error = None

+# Configurable debug log file location
+DEBUG_LOG_FILE = os.environ.get('RAI_DEBUG_LOG', 'debug.jsonl')
+
#--------------------------------------------------
# Log Formatters
#--------------------------------------------------
@@ -70,7 +73,7 @@ class FlushingFileHandler(logging.FileHandler):
    def emit(self, record):
        if not self._initialized:
            self._initialized = True
-            with open(
+            with open(DEBUG_LOG_FILE, 'w'):
                pass
        super().emit(record)
        self.flush()
@@ -78,7 +81,7 @@ class FlushingFileHandler(logging.FileHandler):
try:
    # keep the old file-based debugger around and working until it's fully replaced.
    if DEBUG:
-        file_handler = FlushingFileHandler(
+        file_handler = FlushingFileHandler(DEBUG_LOG_FILE, mode='a')
        file_handler.setFormatter(JsonFormatter())
        logger.addHandler(file_handler)
except Exception:
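With this change the debug log path is read from the `RAI_DEBUG_LOG` environment variable at import time, falling back to `debug.jsonl`. A minimal sketch of redirecting it; only the variable name comes from the hunk above, the path is illustrative:

```python
import os

# Must be set before relationalai.debugging is imported, because
# DEBUG_LOG_FILE is resolved once at module import time.
os.environ["RAI_DEBUG_LOG"] = "/tmp/rai-debug.jsonl"

import relationalai.debugging  # noqa: E402  (import after env setup is intentional)
```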
@@ -8,7 +8,7 @@ __include_in_docs__ = True

from .internal import (
    Model, Concept, Relationship, RelationshipReading, Expression, Fragment, Error, Field,
-    String, Integer, Int64, Int128, Float, Decimal, Bool,
+    AnyEntity, String, Integer, Int64, Int128, Float, Decimal, Bool,
    Date, DateTime,
    RawSource, Hash,
    select, where, require, define, distinct, union, data,
@@ -19,7 +19,7 @@ from .internal import (

__all__ = [
    "Model", "Concept", "Relationship", "RelationshipReading", "Expression", "Fragment", "Error", "Field",
-    "String", "Integer", "Int64", "Int128", "Float", "Decimal", "Bool",
+    "AnyEntity", "String", "Integer", "Int64", "Int128", "Float", "Decimal", "Bool",
    "Date", "DateTime",
    "RawSource", "Hash",
    "select", "where", "require", "define", "distinct", "union", "data",
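These export hunks add `AnyEntity` to the public API next to the other builtin concepts. A hypothetical import sketch, assuming the hunks above and below belong to `relationalai/semantics/__init__.py` and `relationalai/semantics/internal/__init__.py` as the file list suggests:

```python
# Illustrative only: AnyEntity is now importable like the other builtins.
from relationalai.semantics import AnyEntity, String, Int64

print(AnyEntity)  # the root concept that non-primitive, non-builtin concepts extend
```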
@@ -4,7 +4,7 @@ API for RelationalAI.

from .internal import (
    Model, Concept, Relationship, RelationshipReading, Expression, Fragment, Error, Field,
-    String, Integer, Int64, Int128, Float, Decimal, Bool,
+    AnyEntity, String, Integer, Int64, Int128, Float, Decimal, Bool,
    Date, DateTime,
    RawSource, Hash,
    select, where, require, define, distinct, union, data,
@@ -15,7 +15,7 @@ from .internal import (

__all__ = [
    "Model", "Concept", "Relationship", "RelationshipReading", "Expression", "Fragment", "Error", "Field",
-    "String", "Integer", "Int64", "Int128", "Float", "Decimal", "Bool",
+    "AnyEntity", "String", "Integer", "Int64", "Int128", "Float", "Decimal", "Bool",
    "Date", "DateTime",
    "RawSource", "Hash",
    "select", "where", "require", "define", "distinct", "union", "data",
@@ -40,7 +40,7 @@ _global_id = peekable(itertools.count(0))

# Single context variable with default values
_overrides = ContextVar("overrides", default = {})
-def overrides(key: str, default: bool | str | dict):
+def overrides(key: str, default: bool | str | dict | datetime | None):
    return _overrides.get().get(key, default)

# Flag that users set in the config or directly on the model, but that can still be
@@ -60,6 +60,13 @@ def with_overrides(**kwargs):
    finally:
        _overrides.reset(token)

+# Intrinsic values to override for stable snapshots.
+def get_intrinsic_overrides() -> dict[str, Any]:
+    datetime_now = overrides('datetime_now', None)
+    if datetime_now is not None:
+        return {'datetime_now': datetime_now}
+    return {}
+
#--------------------------------------------------
# Root tracking
#--------------------------------------------------
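Combined with the `Model()` hunks further down, this lets tests pin `datetime_now` so snapshots do not depend on wall-clock time. A hypothetical sketch, assuming `with_overrides` is used as a context manager (the token/reset pattern above suggests it) and that the executor consumes the resulting `intrinsic_overrides`:

```python
from datetime import datetime, timezone

# Hypothetical test setup: pin the 'datetime_now' intrinsic while the model
# is constructed, so get_intrinsic_overrides() captures the frozen value.
frozen_now = datetime(2024, 1, 1, tzinfo=timezone.utc)

with with_overrides(datetime_now=frozen_now):   # assumed context-manager usage
    model = Model("snapshot_test")              # hypothetical model name
```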
@@ -614,7 +621,7 @@ class Producer:

        if self._model and self._model._strict:
            raise AttributeError(f"{self._name} has no relationship `{name}`")
-        if topmost_parent is not concept:
+        if topmost_parent is not concept and topmost_parent not in Concept.builtin_concepts:
            topmost_parent._relationships[name] = topmost_parent._get_relationship(name)
            rich.print(f"[red bold][Implicit Subtype Relationship][/red bold] [yellow]{concept}.{name}[/yellow] appended to topmost parent [yellow]{topmost_parent}[/yellow] instead")

@@ -953,12 +960,25 @@ class Concept(Producer):
        self._validate_identifier_relationship(rel)
        self._add_ref_scheme(*args)

-    def _add_ref_scheme(self, *
-
-        #
-
-
-
+    def _add_ref_scheme(self, *rels: Relationship|RelationshipReading):
+        # thanks to prior validation we we can safely assume that
+        # * the input types are correct due to prior validation
+        # * all relationships are binary and defined on this concept
+
+        self._reference_schemes.append(rels)
+
+        # for every concept x every field f has at most one value y.
+        # f(x,y): x -> y holds
+        concept_fields = tuple([rel.__getitem__(0) for rel in rels])
+        for field in concept_fields:
+            concept_uc = Unique(field, model=self._model)
+            require(concept_uc.to_expressions())
+
+        # for any combination of field values there is at most one concept x.
+        # f₁(x,y₁) ∧ … ∧ fₙ(x,yₙ): {y₁,…,yₙ} → {x}
+        key_fields = tuple([rel.__getitem__(1) for rel in rels])
+        key_uc = Unique(*key_fields, model=self._model)
+        require(key_uc.to_expressions())

    def _validate_identifier_relationship(self, rel:Relationship|RelationshipReading):
        if rel._arity() != 2:
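Read as functional dependencies, the two `Unique` constraint families generated in `_add_ref_scheme` amount to the following; this is a restatement of the comments in the hunk, not text from the package:

```latex
% Each identifying field f_i is functional on the concept x:
\forall x, y, y'.\; f_i(x, y) \land f_i(x, y') \rightarrow y = y'
% The combination of field values determines at most one concept:
\forall x, x', y_1, \dots, y_n.\;
  \Big(\bigwedge_{i=1}^{n} f_i(x, y_i)\Big) \land \Big(\bigwedge_{i=1}^{n} f_i(x', y_i)\Big) \rightarrow x = x'
```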
@@ -1145,7 +1165,10 @@ Primitive = Concept.builtins["Primitive"] = Concept("Primitive")
Error = Concept.builtins["Error"] = ErrorConcept("Error")

def _register_builtin(name):
-
+    if name == "AnyEntity":
+        c = Concept(name)
+    else:
+        c = Concept(name, extends=[Primitive])
    Concept.builtin_concepts.add(c)
    Concept.builtins[name] = c

@@ -1154,6 +1177,7 @@ for builtin in types.builtin_types:
    if isinstance(builtin, ir.ScalarType):
        _register_builtin(builtin.name)

+AnyEntity = Concept.builtins["AnyEntity"]
Float = Concept.builtins["Float"]
Number = Concept.builtins["Number"]
Int64 = Concept.builtins["Int64"]
@@ -2603,6 +2627,7 @@ class Model():
        config_overrides = overrides('config', {})
        for k, v in config_overrides.items():
            self._config.set(k, v)
+        self._intrinsic_overrides = get_intrinsic_overrides()
        self._strict = cast(bool, overrides('strict', strict))
        self._use_lqp = overridable_flag('reasoner.rule.use_lqp', self._config, use_lqp, default=not self._use_sql)
        self._enable_otel_handler = overridable_flag('enable_otel_handler', self._config, enable_otel_handler, default=False)
@@ -2644,6 +2669,7 @@ class Model():
                wide_outputs=self._wide_outputs,
                connection=self._connection,
                config=self._config,
+                intrinsic_overrides=self._intrinsic_overrides,
            )
        elif self._use_sql:
            self._executor = SnowflakeExecutor(
@@ -2874,10 +2900,9 @@ class Compiler():
            if concept not in self.types:
                self.to_type(concept)
                self.to_relation(concept)
-
-
-
-                rules.append(rule)
+                rule = self.concept_inheritance_rule(concept)
+                if rule:
+                    rules.append(rule)
        unresolved = []
        for relationship in model.relationships:
            if relationship not in self.relations:
@@ -3182,8 +3207,11 @@ class Compiler():
        # filter extends to get only non-primitive parents
        parents = []
        for parent in concept._extends:
-            if not parent._is_primitive():
+            if not parent._is_primitive() and parent is not AnyEntity:
                parents.append(parent)
+        # always extend AnyEntity for non-primitive types that are not built-in
+        if not concept._is_primitive() and concept not in Concept.builtin_concepts:
+            parents.append(AnyEntity)
        # only extends primitive types, no need for inheritance rules
        if not parents:
            return None
@@ -3196,6 +3224,17 @@ class Compiler():
            *[f.derive(self.to_relation(parent), [var]) for parent in parents]
        ])

+    def concept_any_entity_rule(self, entities:list[Concept]):
+        """
+        Generate an inheritance rule for all these entities to AnyEntity.
+        """
+        any_entity_relation = self.to_relation(AnyEntity)
+        var = f.var("v", types.Any)
+        return f.logical([
+            f.union([f.lookup(self.to_relation(e), [var]) for e in entities]),
+            f.derive(any_entity_relation, [var])
+        ])
+
    def relation_dict(self, items:dict[Relationship|Concept, Producer], ctx:CompilerContext) -> dict[ir.Relation, list[ir.Var]]:
        return {self.to_relation(k): unwrap_list(self.lookup(v, ctx)) for k, v in items.items()}

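In logical terms, the rule built by `concept_any_entity_rule` (a union over the given entity relations that derives into `AnyEntity`) is, in my paraphrase rather than the package's notation:

```latex
\forall v.\; \big( E_1(v) \lor E_2(v) \lor \dots \lor E_n(v) \big) \rightarrow \mathrm{AnyEntity}(v)
```

where E₁, …, Eₙ range over the model's non-primitive, non-builtin entity concepts, the same set that `concept_inheritance_rule` now extends with `AnyEntity` in the hunk above.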
@@ -0,0 +1,34 @@
+# Logic Engine LQP Backend
+
+The logic engine runs the *Logical Query Protocol* (short *LQP*). This module includes a
+compiler from the semantic metamodel to LQP along with an executor.
+
+## Running against a local logic engine
+
+For development and testing, it is possible to run PyRel models against a local logic engine
+server process.
+
+To start your local server, please refer to the [logic engine
+docs](https://github.com/RelationalAI/raicode/tree/master/src/Server#starting-the-server).
+
+With the local server running, add this to your `raiconfig.toml`:
+
+```toml
+[profile.local]
+platform = "local"
+engine = "local"
+host = "localhost"
+port = 8010
+```
+
+Then set `active_profile = "local"` at the top of the file.
+
+**Known limitations:**
+
+Local execution does not support running against Snowflake source tables.
+
+At the moment, locally created databases cannot be cleaned up by the client. Eventually you
+will need to clear your local pager directory.
+
+At the moment, local execution is only supported for fast-path transactions, i.e. those
+which complete in less than 5 seconds. Polling support will be added soon.
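Putting the README's two instructions together, a complete minimal `raiconfig.toml` would look roughly like this; the profile name and port are the README's example values, not requirements:

```toml
# Assumed layout: active_profile at the top, followed by the local profile
# described in the README hunk above.
active_profile = "local"

[profile.local]
platform = "local"
engine = "local"
host = "localhost"
port = 8010
```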
@@ -14,7 +14,7 @@ class Compiler(c.Compiler):
        super().__init__(lqp_passes())
        self.def_names = UniqueNames()

-    def do_compile(self, model: ir.Model, options:dict={}) -> tuple[Optional[tuple], lqp.
+    def do_compile(self, model: ir.Model, options:dict={}) -> tuple[Optional[tuple], lqp.Epoch]:
        fragment_id: bytes = options.get("fragment_id", bytes(404))
        # Reset the var context for each compilation
        # TODO: Change to unique var names per lookup
@@ -59,3 +59,10 @@ def mk_pragma(name: str, terms: list[lqp.Var]) -> lqp.Pragma:

def mk_attribute(name: str, args: list[lqp.Value]) -> lqp.Attribute:
    return lqp.Attribute(name=name, args=args, meta=None)
+
+def mk_transaction(
+    epochs: list[lqp.Epoch],
+    configure: lqp.Configure = lqp.construct_configure({}, None),
+    sync = None
+) -> lqp.Transaction:
+    return lqp.Transaction(epochs=epochs, configure=configure, sync=sync, meta=None)
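A hypothetical usage sketch for the new `mk_transaction` constructor; the import paths and the idea of collecting epochs from `Compiler.do_compile` are assumptions based on the file list above, not code from the package:

```python
# Illustrative only: bundle compiled epochs into a single LQP Transaction.
# mk_transaction supplies a default Configure and leaves sync unset.
from relationalai.semantics.lqp import ir as lqp                    # assumed alias
from relationalai.semantics.lqp.constructors import mk_transaction  # assumed path

epochs: list[lqp.Epoch] = []   # e.g. gathered from Compiler.do_compile results
txn = mk_transaction(epochs)   # -> lqp.Transaction(epochs=..., configure=default, sync=None)
```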