relationalai 0.12.11__py3-none-any.whl → 0.12.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- relationalai/clients/snowflake.py +105 -64
- relationalai/semantics/lqp/passes.py +1 -1
- relationalai/semantics/lqp/rewrite/annotate_constraints.py +3 -1
- relationalai/semantics/lqp/rewrite/extract_common.py +185 -231
- relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py +2 -2
- relationalai/semantics/metamodel/rewrite/flatten.py +11 -0
- relationalai/semantics/rel/compiler.py +1 -1
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/METADATA +1 -1
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/RECORD +12 -12
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/WHEEL +0 -0
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/entry_points.txt +0 -0
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/licenses/LICENSE +0 -0
relationalai/clients/snowflake.py:

```diff
@@ -441,7 +441,8 @@ class Resources(ResourcesBase):
         code: str,
         params: List[Any] | Any | None = None,
         raw: bool = False,
-        help: bool = True
+        help: bool = True,
+        skip_engine_db_error_retry: bool = False
     ) -> Any:
         # print(f"\n--- sql---\n{code}\n--- end sql---\n")
         if not self._session:
@@ -467,7 +468,8 @@ class Resources(ResourcesBase):
             if re.search(f"database '{rai_app}' does not exist or not authorized.".lower(), orig_message):
                 exception = SnowflakeAppMissingException(rai_app, current_role)
                 raise exception from None
-            if _is_engine_issue(orig_message) or _is_database_issue(orig_message):
+            # skip initializing the index if the query is a user transaction. exec_raw/exec_lqp will handle that case with the correct request headers.
+            if (_is_engine_issue(orig_message) or _is_database_issue(orig_message)) and not skip_engine_db_error_retry:
                 try:
                     self._poll_use_index(
                         app_name=self.get_app_name(),
@@ -1609,9 +1611,11 @@ Otherwise, remove it from your '{profile}' configuration profile.
             sql_string = f"CALL {APP_NAME}.api.exec_async_v2('{database}','{engine}', ?, {inputs}, {readonly}, {nowait_durable}, '{language}', {query_timeout_mins}, {request_headers});"
         else:
             sql_string = f"CALL {APP_NAME}.api.exec_async_v2('{database}','{engine}', ?, {inputs}, {readonly}, {nowait_durable}, '{language}', {request_headers});"
+        # Don't let exec setup GI on failure, exec_raw and exec_lqp will do that and add the correct headers.
         response = self._exec(
             sql_string,
             raw_code,
+            skip_engine_db_error_retry=True,
         )
         if not response:
             raise Exception("Failed to create transaction")
@@ -1629,6 +1633,7 @@ Otherwise, remove it from your '{profile}' configuration profile.
         bypass_index=False,
         language: str = "rel",
         query_timeout_mins: int | None = None,
+        gi_setup_skipped: bool = False,
     ):
         if inputs is None:
             inputs = {}
@@ -1638,6 +1643,8 @@ Otherwise, remove it from your '{profile}' configuration profile.
         with debugging.span("transaction", **query_attrs_dict) as txn_span:
             with debugging.span("create_v2", **query_attrs_dict) as create_span:
                 request_headers['user-agent'] = get_pyrel_version(self.generation)
+                request_headers['gi_setup_skipped'] = str(gi_setup_skipped)
+                request_headers['pyrel_program_id'] = debugging.get_program_span_id() or ""
                 response = self._exec_rai_app(
                     database=database,
                     engine=engine,
@@ -1897,26 +1904,29 @@ Otherwise, remove it from your '{profile}' configuration profile.
     # Exec
     #--------------------------------------------------

-    def
+    def _exec_with_gi_retry(
         self,
         database: str,
         engine: str | None,
-        raw_code:
-        query_timeout_mins: int | None
+        raw_code: str,
+        inputs: Dict | None,
+        readonly: bool,
+        nowait_durable: bool,
+        headers: Dict | None,
+        bypass_index: bool,
+        language: str,
+        query_timeout_mins: int | None,
     ):
+        """Execute with graph index retry logic.

+        Attempts execution with gi_setup_skipped=True first. If an engine or database
+        issue occurs, polls use_index and retries with gi_setup_skipped=False.
+        """
         try:
             return self._exec_async_v2(
-                database, engine,
-                headers=headers, bypass_index=bypass_index, language=
-                query_timeout_mins=query_timeout_mins,
+                database, engine, raw_code, inputs, readonly, nowait_durable,
+                headers=headers, bypass_index=bypass_index, language=language,
+                query_timeout_mins=query_timeout_mins, gi_setup_skipped=True,
            )
         except Exception as e:
             err_message = str(e).lower()
@@ -1933,13 +1943,32 @@ Otherwise, remove it from your '{profile}' configuration profile.
                 )

                 return self._exec_async_v2(
-                    database, engine,
-                    headers=headers, bypass_index=bypass_index, language=
-                    query_timeout_mins=query_timeout_mins,
+                    database, engine, raw_code, inputs, readonly, nowait_durable,
+                    headers=headers, bypass_index=bypass_index, language=language,
+                    query_timeout_mins=query_timeout_mins, gi_setup_skipped=False,
                 )
             else:
                 raise e

+    def exec_lqp(
+        self,
+        database: str,
+        engine: str | None,
+        raw_code: bytes,
+        readonly=True,
+        *,
+        inputs: Dict | None = None,
+        nowait_durable=False,
+        headers: Dict | None = None,
+        bypass_index=False,
+        query_timeout_mins: int | None = None,
+    ):
+        raw_code_b64 = base64.b64encode(raw_code).decode("utf-8")
+        return self._exec_with_gi_retry(
+            database, engine, raw_code_b64, inputs, readonly, nowait_durable,
+            headers, bypass_index, 'lqp', query_timeout_mins
+        )
+

     def exec_raw(
         self,
@@ -1955,45 +1984,10 @@ Otherwise, remove it from your '{profile}' configuration profile.
         query_timeout_mins: int | None = None,
     ):
         raw_code = raw_code.replace("'", "\\'")
-        try:
-            return self._exec_async_v2(
-                database,
-                engine,
-                raw_code,
-                inputs,
-                readonly,
-                nowait_durable,
-                headers=headers,
-                bypass_index=bypass_index,
-                query_timeout_mins=query_timeout_mins,
-            )
-        except Exception as e:
-            err_message = str(e).lower()
-            if _is_engine_issue(err_message) or _is_database_issue(err_message):
-                engine_name = engine or self.get_default_engine_name()
-                engine_size = self.config.get_default_engine_size()
-                self._poll_use_index(
-                    app_name=self.get_app_name(),
-                    sources=self.sources,
-                    model=database,
-                    engine_name=engine_name,
-                    engine_size=engine_size,
-                    headers=headers,
-                )
-                return self._exec_async_v2(
-                    database,
-                    engine,
-                    raw_code,
-                    inputs,
-                    readonly,
-                    nowait_durable,
-                    headers=headers,
-                    bypass_index=bypass_index,
-                    query_timeout_mins=query_timeout_mins,
-                )
-            else:
-                raise e
+        return self._exec_with_gi_retry(
+            database, engine, raw_code, inputs, readonly, nowait_durable,
+            headers, bypass_index, 'rel', query_timeout_mins
+        )


     def format_results(self, results, task:m.Task|None=None) -> Tuple[DataFrame, List[Any]]:
```
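With this refactor, `exec_raw` and the new `exec_lqp` both delegate to `_exec_with_gi_retry` rather than each carrying a hand-rolled try/except block. Reduced to a standalone sketch, the retry shape looks like this; the string-matching error classifiers and `_repair_index` below are illustrative placeholders, not the package's actual API:

```python
# Minimal sketch of the "attempt, repair, retry once" pattern behind
# _exec_with_gi_retry. All names here are illustrative stand-ins.
def _is_engine_issue(msg: str) -> bool:
    return "engine" in msg          # placeholder classification

def _is_database_issue(msg: str) -> bool:
    return "database" in msg        # placeholder classification

class Client:
    def _exec_async_v2(self, code: str, *, gi_setup_skipped: bool):
        raise NotImplementedError   # stand-in for the real transaction call

    def _repair_index(self):
        raise NotImplementedError   # stand-in for _poll_use_index

    def exec_with_retry(self, code: str):
        try:
            # Fast path: ask the server to skip graph-index setup.
            return self._exec_async_v2(code, gi_setup_skipped=True)
        except Exception as e:
            msg = str(e).lower()
            if _is_engine_issue(msg) or _is_database_issue(msg):
                # Repair engine/database state, then retry exactly once,
                # this time allowing graph-index setup on the server.
                self._repair_index()
                return self._exec_async_v2(code, gi_setup_skipped=False)
            raise
```

The first attempt sends `gi_setup_skipped=True` so the server stays on the hot path; only after a classified engine or database failure does the client repair state via use_index polling and retry once with setup allowed.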
```diff
@@ -3281,6 +3275,7 @@ class DirectAccessResources(Resources):
         path_params: Dict[str, str] | None = None,
         query_params: Dict[str, str] | None = None,
         skip_auto_create: bool = False,
+        skip_engine_db_error_retry: bool = False,
     ) -> requests.Response:
         with debugging.span("direct_access_request"):
             def _send_request():
@@ -3314,8 +3309,8 @@ class DirectAccessResources(Resources):
                     message = "" # Not used when we check status_code directly

                 # fix engine on engine error and retry
-                # Skip
-                if (_is_engine_issue(message) and not skip_auto_create) or _is_database_issue(message):
+                # Skip setting up GI if skip_auto_create is True to avoid recursion or skip_engine_db_error_retry is true to let _exec_async_v2 perform the retry with the correct headers.
+                if ((_is_engine_issue(message) and not skip_auto_create) or _is_database_issue(message)) and not skip_engine_db_error_retry:
                     engine_name = payload.get("caller_engine_name", "") if payload else ""
                     engine_name = engine_name or self.get_default_engine_name()
                     engine_size = self.config.get_default_engine_size()
@@ -3340,6 +3335,48 @@ class DirectAccessResources(Resources):
                     raise e
             return response

+    def _txn_request_with_gi_retry(
+        self,
+        payload: Dict,
+        headers: Dict[str, str],
+        query_params: Dict,
+        engine: Union[str, None],
+    ):
+        """Make request with graph index retry logic.
+
+        Attempts request with gi_setup_skipped=True first. If an engine or database
+        issue occurs, polls use_index and retries with gi_setup_skipped=False.
+        """
+        response = self.request(
+            "create_txn", payload=payload, headers=headers, query_params=query_params, skip_auto_create=True, skip_engine_db_error_retry=True
+        )
+
+        if response.status_code != 200:
+            try:
+                message = response.json().get("message", "")
+            except requests.exceptions.JSONDecodeError:
+                message = ""
+
+            if _is_engine_issue(message) or _is_database_issue(message):
+                engine_name = engine or self.get_default_engine_name()
+                engine_size = self.config.get_default_engine_size()
+                self._poll_use_index(
+                    app_name=self.get_app_name(),
+                    sources=self.sources,
+                    model=self.database,
+                    engine_name=engine_name,
+                    engine_size=engine_size,
+                    headers=headers,
+                )
+                headers['gi_setup_skipped'] = 'False'
+                response = self.request(
+                    "create_txn", payload=payload, headers=headers, query_params=query_params, skip_auto_create=True, skip_engine_db_error_retry=True
+                )
+            else:
+                raise ResponseStatusException("Failed to create transaction.", response)
+
+        return response
+
     def _exec_async_v2(
         self,
         database: str,
@@ -3352,6 +3389,7 @@ class DirectAccessResources(Resources):
         bypass_index=False,
         language: str = "rel",
         query_timeout_mins: int | None = None,
+        gi_setup_skipped: bool = False,
     ):

         with debugging.span("transaction") as txn_span:
@@ -3374,12 +3412,15 @@ class DirectAccessResources(Resources):
             payload["timeout_mins"] = query_timeout_mins
             query_params={"use_graph_index": str(use_graph_index and not bypass_index)}

+            # Add gi_setup_skipped to headers
+            if headers is None:
+                headers = {}
+            headers["gi_setup_skipped"] = str(gi_setup_skipped)
+            headers['pyrel_program_id'] = debugging.get_program_span_id() or ""

+            response = self._txn_request_with_gi_retry(
+                payload, headers, query_params, engine
+            )

             artifact_info = {}
             response_content = response.json()
```
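`DirectAccessResources` applies the same idea over HTTP, where failure arrives as a non-200 response instead of an exception: `_txn_request_with_gi_retry` parses the error message defensively and flips the `gi_setup_skipped` header before its single retry. A self-contained sketch of that flow, with `FakeResponse`, `send`, and `repair` as hypothetical stand-ins for `requests.Response`, `self.request(...)`, and `self._poll_use_index(...)`:

```python
import json
from dataclasses import dataclass
from typing import Callable

@dataclass
class FakeResponse:                 # hypothetical stand-in for requests.Response
    status_code: int
    body: bytes

def parse_message(resp: FakeResponse) -> str:
    # Defensive parse: an empty or non-JSON error body must not raise here.
    try:
        payload = json.loads(resp.body)
        return payload.get("message", "") if isinstance(payload, dict) else ""
    except json.JSONDecodeError:
        return ""

def create_txn_with_retry(
    send: Callable[[dict], FakeResponse],   # stand-in for self.request("create_txn", ...)
    repair: Callable[[], None],             # stand-in for self._poll_use_index(...)
    headers: dict,
) -> FakeResponse:
    headers["gi_setup_skipped"] = "True"    # first attempt skips index setup
    resp = send(headers)
    if resp.status_code != 200:
        msg = parse_message(resp).lower()
        if "engine" in msg or "database" in msg:    # placeholder classification
            repair()
            headers["gi_setup_skipped"] = "False"   # allow setup on the retry
            resp = send(headers)
        else:
            raise RuntimeError("Failed to create transaction.")
    return resp
```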
relationalai/semantics/lqp/passes.py:

```diff
@@ -29,8 +29,8 @@ def lqp_passes() -> list[Pass]:
         InferTypes(),
         DNFUnionSplitter(),
         ExtractKeys(),
-        ExtractCommon(),
         FormatOutputs(),
+        ExtractCommon(), # Extracts tasks that will become common after Flatten into their own definition
         Flatten(),
         Splinter(), # Splits multi-headed rules into multiple rules
         QuantifyVars(), # Adds missing existentials
```
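The reorder matters because `lqp_passes()` is a pipeline: each pass consumes the IR the previous one produced, so `ExtractCommon` now sees the output of `FormatOutputs` but still runs ahead of `Flatten`, whose extractions it is trying to anticipate. A toy illustration of order-sensitive pass composition (not the real `Pass` interface):

```python
from functools import reduce
from typing import Callable

Rewrite = Callable[[str], str]

def compose(passes: list[Rewrite]) -> Rewrite:
    # Apply passes left to right, the way a pass list is consumed.
    return lambda program: reduce(lambda p, f: f(p), passes, program)

reverse: Rewrite = lambda p: p[::-1]
exclaim: Rewrite = lambda p: p + "!"

# The same two passes in different orders produce different programs:
assert compose([reverse, exclaim])("abc") == "cba!"
assert compose([exclaim, reverse])("abc") == "!cba"
```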
relationalai/semantics/lqp/rewrite/annotate_constraints.py:

```diff
@@ -10,7 +10,7 @@ from relationalai.semantics.lqp.rewrite.functional_dependencies import (
     is_valid_unique_constraint, normalized_fd
 )

-
+_DISABLE_CONSTRAINT_DECLARATIONS = True

 class AnnotateConstraints(Pass):
     """
@@ -36,6 +36,8 @@ class AnnotateConstraintsRewriter(DischargeConstraintsVisitor):
     """

     def _should_be_declarable_constraint(self, node: Require) -> bool:
+        if _DISABLE_CONSTRAINT_DECLARATIONS:
+            return False
         if not is_valid_unique_constraint(node):
             return False
         # Currently, we only declare non-structural functional dependencies.
```
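Together these two hunks form a module-level kill switch: `_should_be_declarable_constraint` now short-circuits before any real analysis, so constraint declarations are disabled wholesale while the validation logic stays in place for re-enabling. The pattern in isolation (stand-in names, not the package's code):

```python
_DISABLE_CONSTRAINT_DECLARATIONS = True   # single switch, no other edits needed

def _is_valid_unique_constraint(node: object) -> bool:
    return True                           # placeholder for the real checks

def should_declare(node: object) -> bool:
    # Short-circuit before any analysis; flip the constant to re-enable.
    if _DISABLE_CONSTRAINT_DECLARATIONS:
        return False
    return _is_valid_unique_constraint(node)
```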
relationalai/semantics/lqp/rewrite/extract_common.py:

```diff
@@ -3,11 +3,13 @@ from __future__ import annotations
 from dataclasses import dataclass
 from typing import Optional
 from relationalai.semantics.metamodel import ir, factory as f, helpers, visitor
-from relationalai.semantics.metamodel.compiler import Pass
+from relationalai.semantics.metamodel.compiler import Pass
 from relationalai.semantics.metamodel.util import OrderedSet, ordered_set
 from relationalai.semantics.metamodel import dependency
 from relationalai.semantics.metamodel import builtins

+from functools import reduce
+
 class ExtractCommon(Pass):
     """
     Pass to analyze Logical bodies and extract lookups in their own Logical if it makes
@@ -71,7 +73,7 @@ class ExtractCommon(Pass):
     )

     #--------------------------------------------------
-    #
+    # Extra classes
     #--------------------------------------------------

     class Context():
@@ -80,8 +82,26 @@ class ExtractCommon(Pass):
             self.info = dependency.analyze(model.root)
             self.options = options

+    @dataclass
+    class ExtractionPlan():
+        # tasks to extract to the body of the common logical
+        common_body: OrderedSet[ir.Task]
+        # tasks to remain in the original body
+        remaining_body: OrderedSet[ir.Task]
+        # variables to be exposed by the common logical
+        exposed_vars: OrderedSet[ir.Var]
+        # map from nested composite to the tasks in the common body that still need to be
+        # included in its body, because it contains variables not exposed by the common logical
+        local_dependencies: dict[ir.Task, OrderedSet[ir.Task]]
+        # a reference to the common connection created for this plan, if any
+        common_reference: Optional[ir.Lookup] = None
+
+    #--------------------------------------------------
+    # IR handlers
+    #--------------------------------------------------
+
     def handle(self, task: ir.Task, ctx: Context):
-        #
+        # Currently we only extract if it's a sequence of Logicals, but we could in the
         # future support other intermediate nodes
         if isinstance(task, ir.Logical):
             return self.handle_logical(task, ctx)
```
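`ExtractionPlan` (now a nested class next to `Context`, and without the old `distribute_common_reference` flag) records the result of what is essentially common-subexpression elimination over rule bodies: binders shared by all flattenable children are hoisted into one common definition, and only variables that the remaining tasks consume get exposed. The core set computation, modeled with plain Python sets rather than the metamodel IR:

```python
from dataclasses import dataclass
from functools import reduce

# Toy model: a task is a name plus the variables it reads and writes.
@dataclass(frozen=True)
class Task:
    name: str
    inputs: frozenset[str] = frozenset()
    outputs: frozenset[str] = frozenset()

def common_body(binders: set[Task], flattenable_deps: list[set[Task]]) -> set[Task]:
    # Intersect every flattenable's dependencies, restricted to binders,
    # the same shape as _create_extraction_plan in the hunks below.
    return binders & reduce(lambda a, b: a & b, flattenable_deps)

a = Task("lookup_a", outputs=frozenset({"x"}))
b = Task("lookup_b", inputs=frozenset({"x"}), outputs=frozenset({"y"}))
c = Task("lookup_c", outputs=frozenset({"z"}))

# Both flattenables depend on a and b; only the second also needs c,
# so only a and b are worth hoisting into the shared definition.
assert common_body({a, b, c}, [{a, b}, {a, b, c}]) == {a, b}
```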
```diff
@@ -89,66 +109,52 @@ class ExtractCommon(Pass):
         return task

     def handle_logical(self, task: ir.Logical, ctx: Context):
-        # Process the original body to find
-        #
+        # Process the original body to find candidates for extraction. Each task is in one
+        # of three categories:
+        # - Binders: tasks that bind variables. These are candidates for extracting into
+        #   the common body.
+        # - Flattenables: tasks that will later be extracted by the Flatten pass
+        # - Other: tasks that are neither binders nor flattenables; these will remain
+        #   in the body as-is.
+
+        binders = ordered_set()
+        flattenables = ordered_set()
+        other = ordered_set()
+
+        for child in task.body:
+            if _is_binder(child):
+                binders.add(child)
+            elif _is_flattenable(ctx, child):
+                flattenables.add(child)
+            else:
+                other.add(child)
+
+        # The new body of the rewritten task
         body:OrderedSet[ir.Task] = ordered_set()

-        #
-        # common binders if there are multiple, and there are also multiple
+        # Quick check to see if it's worth doing more analysis; we only want to extract
+        # common binders if there are multiple, and there are also multiple flattenables
         # that will be extracted by the flatten pass later (so that they can share the
         # extracted logic).
-        plan = None
-        if len(binders) > 1 and
-            exposed_vars = plan.exposed_vars.get_list()
-            plan.common_reference = f.lookup(helpers.extract(task, plan.common_body, exposed_vars, ctx.rewrite_ctx, "common"), exposed_vars)
-            # if we are not distributing the reference, add to the main body
-            if not plan.distribute_common_reference:
-                body.add(plan.common_reference)
-
-        # if we have a plan and will distribute the common reference, keep track of
-        # variables still needed by the remaining tasks, as they need to be hoisted by
-        # the remaining composites that get the common reference
-        remaining_vars = None
-        if plan and plan.distribute_common_reference:
-            # add variables hoisted by this logical that are in the exposed vars, to
-            # make sure they are hoisted all the way through
-            remaining_vars = OrderedSet.from_iterable(helpers.hoisted_vars(task.hoisted)) & plan.exposed_vars
-            for child in task.body:
-                if child in groups["other"] or child not in plan.remaining_body or child in composites_and_effects:
-                    continue
-                remaining_vars.update(ctx.info.task_inputs(child))
-                remaining_vars.update(ctx.info.task_outputs(child))
-            remaining_vars = remaining_vars & plan.exposed_vars
-
-        # if the plan was not used in one of the cases above, ignore it completely, we
-        # are not extracting common nor distributing it around
-        if plan and not plan.distribute_common_reference and not len(composites_and_effects) > 1:
-            plan = None
+        plan: Optional[ExtractCommon.ExtractionPlan] = None
+        if len(binders) > 1 and len(flattenables) > 1:
+            plan = _create_extraction_plan(ctx, binders, flattenables, other)
+        if plan:
+            # plan is worthwhile, extract the common body and add the connection to the body
+            exposed_vars = plan.exposed_vars.get_list()
+            plan.common_reference = f.lookup(helpers.extract(task, plan.common_body, exposed_vars, ctx.rewrite_ctx, "common"), exposed_vars)
+
+            # Add plan common reference to the body.
+            body.add(plan.common_reference)

         # recursively handle children
         for child in task.body:
             # skip children that were extracted
-            if plan and child not in
+            if plan and child not in other and child not in plan.remaining_body and child not in flattenables:
                 continue

             # no plan or child is not a composite, so just add the handled to the body
-            if not plan or child not in
+            if not plan or child not in flattenables:
                 body.add(self.handle(child, ctx))
                 continue
@@ -156,7 +162,7 @@ class ExtractCommon(Pass):
             replacement = self.handle(child, ctx)

             # this child needs either extra local dependencies or the common reference
-            if child in plan.local_dependencies
+            if child in plan.local_dependencies:
                 # the new body will have maybe the common reference and the local deps
                 replacement_body = ordered_set()

@@ -172,19 +178,6 @@ class ExtractCommon(Pass):
                         dep_outputs.update(ctx.info.task_outputs(d))
                     hoisted.update(dep_outputs & ctx.info.task_inputs(replacement))

-                if plan.distribute_common_reference:
-                    if len(composites_and_effects) == 1:
-                        # if there's a single composite, just insert the whole common body into it
-                        replacement_body.update(plan.common_body)
-                    else:
-                        # otherwise insert a clone of the reference on the extracted rule
-                        assert(plan.common_reference)
-                        replacement_body.add(plan.common_reference.clone())
-                # add remaining vars to hoisted, making sure there's no duplicates (due to VarOrDefault)
-                hoisted_vars = helpers.hoisted_vars(hoisted)
-                if remaining_vars:
-                    hoisted = OrderedSet.from_iterable(filter(lambda v: v not in hoisted_vars, remaining_vars)) | hoisted
-
                 if child in plan.local_dependencies:
                     for local_dep in plan.local_dependencies[child]:
                         replacement_body.add(local_dep.clone())
@@ -208,177 +201,138 @@ class ExtractCommon(Pass):

         return ir.Logical(task.engine, task.hoisted, tuple(body))

-    @dataclass
-    class ExtractionPlan():
-        # tasks to extract to the body of the common logical
-        common_body: OrderedSet[ir.Task]
-        # tasks to remain in the original body
-        remaining_body: OrderedSet[ir.Task]
-        # variables to be exposed by the common logical
-        exposed_vars: OrderedSet[ir.Var]
-        # map from nested composite to the tasks in the common body that still need to be
-        # included in its body, because it contains variables not exposed by the common logical
-        local_dependencies: dict[ir.Task, OrderedSet[ir.Task]]
-        # whether the common reference should be distributed to composites
-        distribute_common_reference: bool
-        # a reference to the common connection created for this plan, if any
-        common_reference: Optional[ir.Lookup] = None

+#--------------------------------------------------
+# Utilities
+#--------------------------------------------------

-            return None
-
-        # Compute intersection of task dependencies
-        sample = composites.some()
-        deps = ctx.info.task_dependencies(sample)
-        if deps is None:
+def _create_extraction_plan(ctx: ExtractCommon.Context, binders: OrderedSet[ir.Task], flattenables: OrderedSet[ir.Task], others: OrderedSet[ir.Task]) -> Optional[ExtractCommon.ExtractionPlan]:
+    """
+    Compute a plan to extract tasks in this frame that are common dependencies
+    across these composite tasks.
+    """
+    # If there are any pragma lookups, then don't extract anything. Pragma lookups are
+    # designed to control execution order, and extracting them may affect their
+    # semantics.
+    for b in binders:
+        if isinstance(b, ir.Lookup) and builtins.is_pragma(b.relation):
             return None
-        # only get sibling dependencies
-        common_body = binders & deps
-
-        # For other composites, remove their sibling dependencies so that we end up with
-        # the intersection of dependencies
-        for composite in composites:
-            if composite is sample:
-                continue

+    # Compute intersection of task dependencies
+    all_deps = [ctx.info.task_dependencies(f) for f in flattenables]
+    deps = reduce(lambda a, b: a & b, all_deps)
+    common_body = binders & deps
+
+    # We don't need to extract anything if there's only zero or one common tasks
+    if len(common_body) < 2:
+        return None
+
+    # Keep track of remaining tasks that are not extracted in the common body
+    remaining = ordered_set()
+
+    # Compute the vars that should be output from the common body. These are the union of
+    # all input vars across all non-extracted tasks, intersected with output vars of
+    # the common body.
+
+    # First, compute the output vars of the common body
+    common_body_output_vars = OrderedSet()
+    for child in common_body:
+        common_body_output_vars.update(ctx.info.task_outputs(child))
+
+    # Next, compute the union of the input vars of all non-extracted tasks
+    non_extracted_tasks = (binders - common_body) | flattenables | others
+    all_exposed_vars: list[OrderedSet[ir.Var]] = []
+    for t in non_extracted_tasks:
+        input_vars = ctx.info.task_inputs(t)
+        all_exposed_vars.append(input_vars if input_vars else OrderedSet())
+
+    exposed_vars = reduce(lambda a, b: a | b, all_exposed_vars) & common_body_output_vars
+
+    # If there are no vars in common, then it's not worth extracting
+    if not exposed_vars:
+        return None
+
+    # Make sure that all local dependencies of the common body are included in the common
+    # body. This is important for the safety of this rewrite.
+    for task in common_body:
+        local_deps = ctx.info.local_dependencies(task)
+        if local_deps:
+            common_body.update(local_deps & binders)
+
+    # check which of the original binders remain, and make sure their dependencies also stay
+    for binder in binders:
+        if binder not in common_body:
+            remaining.add(binder)
+            deps = _compute_local_dependencies(ctx, binders, binder, exposed_vars)
             if deps:
-            if task not in deps:
-                common_body.remove(task)
-
-        # Compute union of input vars
-        # Start with the output vars of the common body. We only want to expose vars that
-        # are output from the common body
-        body_output_vars = OrderedSet()
-        for child in common_body:
-            body_output_vars.update(ctx.info.task_outputs(child))
-
-        # Compute the union of input vars across all non-extracted tasks (basically
-        # composites and binders left behind), intersected with output
-        # vars of the common body
-        exposed_vars = OrderedSet.from_iterable(ctx.info.task_inputs(sample)) & body_output_vars
-        non_extracted_tasks = (binders - common_body) | composites
-        for composite in non_extracted_tasks:
-            if composite is sample:
-                continue
-            # compute common input vars
-            t_inputs = OrderedSet.from_iterable(ctx.info.task_inputs(composite))
-            exposed_vars.update(t_inputs & body_output_vars)
+                remaining.update(deps)

+    # for each composite, check if there are additional tasks needed, because the task
+    # depends on it but it is not exposed by the vars
+    local_dependencies: dict[ir.Task, OrderedSet[ir.Task]] = dict()
+    for flattenable in flattenables:
+        local = _compute_local_dependencies(ctx, binders, flattenable, exposed_vars)
+        if local:
+            local_dependencies[flattenable] = local

-        local_deps = ctx.info.local_dependencies(task)
-        if local_deps:
-            common_body.update(local_deps & binders)
+    return ExtractCommon.ExtractionPlan(common_body, remaining, exposed_vars, local_dependencies)

-        # common body
-        common_vars = ordered_set()
-        for task in common_body:
-            common_vars.update(ctx.info.task_outputs(task))
-        common_vars = common_vars - exposed_vars
-        for v in common_vars:
-            for binder in binders:
-                if binder not in common_body and ctx.info.task_inputs(binder) and v in ctx.info.task_inputs(binder):
-                    exposed_vars.add(v)
-                    break
-
-        # check which of the original binders remain, and make sure their dependencies also stay
-        remaining = ordered_set()
-        for binder in binders:
-            if binder not in common_body:
-                remaining.add(binder)
-                deps = self._compute_local_dependencies(ctx, binders, binder, exposed_vars)
-                if deps:
-                    remaining.update(deps)
-
-        # for each composite, check if there are additional tasks needed, because the task
-        # depends on it but it is not exposed by the vars
-        local_dependencies: dict[ir.Task, OrderedSet[ir.Task]] = dict()
-        for composite in composites:
-            local = self._compute_local_dependencies(ctx, binders, composite, exposed_vars)
-            if local:
-                local_dependencies[composite] = local
-
-        # distribute the common reference only if all of the composites are extractable and there's nothing else remaining
-        distribute_common_reference = len(extractables) == len(composites) and not remaining
-
-        return ExtractCommon.ExtractionPlan(common_body, remaining, exposed_vars, local_dependencies, distribute_common_reference)
-
-    def _compute_local_dependencies(self, ctx: Context, binders: OrderedSet[ir.Task], composite: ir.Task, exposed_vars: OrderedSet[ir.Var]):
-        """
-        The tasks in common_body will be extracted into a logical that will expose the exposed_vars.
-        Compute which additional dependencies are needed specifically for this composite, because
-        it depends on some tasks that are extracted to common_body but not exposed by exposed_vars.
-        """
-
-        # working list of vars we still need to fulfill
-        inputs = ctx.info.task_inputs(composite)
-        if not inputs:
-            return None
-
-        # vars exposed by exposed vars + tasks added to the local body
-        vars_exposed = OrderedSet.from_iterable(exposed_vars)
-        vars_needed = (inputs - vars_exposed)
-        if not vars_needed:
-            return None
+def _compute_local_dependencies(ctx: ExtractCommon.Context, binders: OrderedSet[ir.Task], composite: ir.Task, exposed_vars: OrderedSet[ir.Var]):
+    """
+    The tasks in common_body will be extracted into a logical that will expose the exposed_vars.
+    Compute which additional dependencies are needed specifically for this composite, because
+    it depends on some tasks that are extracted to common_body but not exposed by exposed_vars.
+    """

+    # working list of vars we still need to fulfill
+    inputs = ctx.info.task_inputs(composite)
+    if not inputs:
+        return None
+
+    # vars exposed by exposed vars + tasks added to the local body
+    vars_exposed = OrderedSet.from_iterable(exposed_vars)
+    vars_needed = (inputs - vars_exposed)
+    if not vars_needed:
+        return None
+
+    # this is a greedy algorithm that uses the first task in the common body that provides
+    # a variable needed; it may result in sub-optimal extraction, but should be correct
+    local_body = ordered_set()
+    while(vars_needed):
+        v = vars_needed.pop()
+        for x in binders:
+            if x not in local_body:
+                # an x that is not yet in local_body can fulfill v
+                x_outputs = ctx.info.task_outputs(x)
+                if x_outputs and v in x_outputs:
+                    # add it to local_body and add its outputs to vars exposed
+                    local_body.add(x)
+                    vars_exposed.add(x_outputs)
+                    # but add its inputs the vars now needed
+                    inputs = ctx.info.task_inputs(x)
+                    if inputs:
+                        vars_needed.update(inputs - vars_exposed)
+    return local_body
+
+def _is_binder(task: ir.Task):
+    # If the task itself is a binder
+    if any(isinstance(task, binder) for binder in (ir.Lookup, ir.Construct, ir.Exists, ir.Data, ir.Not)):
+        return True
+
+    # If the task is a Logical containing only binders
+    if isinstance(task, ir.Logical) and all(_is_binder(c) for c in task.body):
+        return True
+
+    # If the task is a Union containing only binders
+    if isinstance(task, ir.Union) and all(_is_binder(c) for c in task.tasks):
+        return True
+
+    return False
+
+def _is_flattenable(ctx: ExtractCommon.Context, task: ir.Task):
+    # Each output will be flattened into its own top-level def
+    if isinstance(task, ir.Output):
+        return True
+
+    extractable_types = (ir.Update, ir.Aggregate, ir.Match, ir.Rank)
+    return isinstance(task, ir.Logical) and len(visitor.collect_by_type(extractable_types, task)) > 0
```
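`_compute_local_dependencies`, now a module-level function, is a greedy worklist: pop an unmet variable, take the first binder that outputs it, and fold that binder's own inputs back into the worklist. The same loop over plain dicts and sets (a toy stand-in, not the metamodel API):

```python
def local_dependency_closure(
    providers: dict[str, tuple[set[str], set[str]]],  # name -> (inputs, outputs)
    needed: set[str],
    already_exposed: set[str],
) -> set[str]:
    exposed = set(already_exposed)
    chosen: set[str] = set()
    todo = set(needed) - exposed
    while todo:
        var = todo.pop()
        for name, (ins, outs) in providers.items():
            if name not in chosen and var in outs:
                chosen.add(name)           # first provider wins (greedy)
                exposed |= outs            # its outputs become available
                todo |= ins - exposed      # its inputs are new obligations
                break
    return chosen

providers = {
    "p": (set(), {"x"}),    # p binds x from nothing
    "q": ({"x"}, {"y"}),    # q needs x to bind y
}
# Asking for y pulls in q, and transitively p to satisfy q's input x.
assert local_dependency_closure(providers, {"y"}, set()) == {"p", "q"}
```

As the shipped comment notes, taking the first provider is greedy and may over-extract, but every variable obligation is eventually met, so the result is correct.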
relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py:

```diff
@@ -140,13 +140,13 @@ class DNFExtractor(Visitor):
             for new_task in replacement_tasks:
                 # copy to mutate
                 new_body = list(body)
-                new_body.append(new_task)
+                new_body.append(new_task.clone())
                 new_replacement_bodies.append(new_body)
             replacement_bodies = new_replacement_bodies

         else:
             for new_body in replacement_bodies:
-                new_body.append(task)
+                new_body.append(task.clone())

         replacement_tasks: list[ir.Task] = []
         for body in replacement_bodies:
```
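The dnf_union_splitter fix is small but classic: appending the same task object into several DNF branch bodies aliases it, so a later in-place change in one branch would leak into all of them; `clone()` gives each branch its own copy. The hazard in miniature (toy `Node`, not the real `ir.Task`):

```python
from copy import deepcopy

class Node:
    def __init__(self, items: list[str]):
        self.items = items
    def clone(self) -> "Node":
        return Node(deepcopy(self.items))

# Buggy fan-out: every DNF branch body holds the *same* node object,
# so an in-place mutation in one branch is visible in all of them.
shared = Node(["lookup"])
bodies = [[shared], [shared]]
bodies[0][0].items.append("mutated")
assert bodies[1][0].items == ["lookup", "mutated"]   # leaked across branches

# Clone-per-branch fan-out keeps the branches independent.
shared = Node(["lookup"])
bodies = [[shared.clone()], [shared.clone()]]
bodies[0][0].items.append("mutated")
assert bodies[1][0].items == ["lookup"]
```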
relationalai/semantics/metamodel/rewrite/flatten.py:

```diff
@@ -211,6 +211,17 @@ class Flatten(Pass):
             if child in composites:
                 all_composites_removed = False

+        # Filter out empty logicals from the body
+        new_body:OrderedSet[ir.Task] = ordered_set()
+        for b in body:
+            if isinstance(b, ir.Logical):
+                if not b.body:
+                    # empty logical, skip
+                    continue
+            new_body.add(b)
+
+        body = new_body
+
         # all children were extracted or all composites were removed without any effects
         # left and no outputs (so no way for outer dependencies), drop this logical
         if not body or (all_composites_removed and not any([isinstance(t, helpers.EFFECTS) for t in body]) and not ctx.info.task_outputs(task)):
```
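The flatten change guards against degenerate nodes: once children have been extracted elsewhere (for example by the reworked ExtractCommon), a `Logical` can be left with an empty body, and keeping it around would confuse the drop-this-logical check that follows. The filter in isolation (`Logical` here is a stand-in for `ir.Logical`):

```python
class Logical:                      # stand-in for ir.Logical
    def __init__(self, body):
        self.body = tuple(body)

def prune_empty_logicals(body):
    # Drop Logicals whose body is empty; keep every other task untouched.
    return [t for t in body if not (isinstance(t, Logical) and not t.body)]

kept = Logical(["effect"])
assert prune_empty_logicals([kept, Logical([]), "atom"]) == [kept, "atom"]
```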
{relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/RECORD:

```diff
@@ -29,7 +29,7 @@ relationalai/clients/hash_util.py,sha256=pZVR1FX3q4G_19p_r6wpIR2tIM8_WUlfAR7AVZJ
 relationalai/clients/local.py,sha256=uX1Or2WO0juDuqa6TCCvm3G2ieP6p0PtYp_jfrCMlVc,23577
 relationalai/clients/profile_polling.py,sha256=pUH7WKH4nYDD0SlQtg3wsWdj0K7qt6nZqUw8jTthCBs,2565
 relationalai/clients/result_helpers.py,sha256=wDSD02Ngx6W-YQqBIGKnpXD4Ju3pA1e9Nz6ORRI6SRI,17808
-relationalai/clients/snowflake.py,sha256=
+relationalai/clients/snowflake.py,sha256=pB_4t70vTmfGPUJnJpbrrsNH5pojPJyqodsMtUL6b9c,168261
 relationalai/clients/types.py,sha256=eNo6akcMTbnBFbBbHd5IgVeY-zuAgtXlOs8Bo1SWmVU,2890
 relationalai/clients/use_index_poller.py,sha256=rrkg35xiHqY0-2dZlPkgixEGENrIrl7bf_2TboX_qew,46794
 relationalai/clients/util.py,sha256=NJC8fnrWHR01NydwESPSetIHRWf7jQJURYpaWJjmDyE,12311
@@ -314,7 +314,7 @@ relationalai/semantics/lqp/executor.py,sha256=GuPiSJpLaMYqjZOlVEFLOUilyuLcfkQgZ6
 relationalai/semantics/lqp/intrinsics.py,sha256=oKPIcW8PYgU-yPTO21iSF00RBsFKPFFP5MICe6izjKk,871
 relationalai/semantics/lqp/ir.py,sha256=6W9mUH0W7u5eIfF1S3o33uSOfQuM3UcqEkxrxpr1X_8,1867
 relationalai/semantics/lqp/model2lqp.py,sha256=PTe2PtHioWAclu6tdkCu5iGaHJxPIe7zIG1Ew72qxDo,36957
-relationalai/semantics/lqp/passes.py,sha256=
+relationalai/semantics/lqp/passes.py,sha256=bpRCdssqcSlVw77tam6EIjI0BypDoFKGWJix8dTywhU,28621
 relationalai/semantics/lqp/pragmas.py,sha256=FzzldrJEAZ1AIcEw6D-FfaVg3CoahRYgPCFo7xHfg1g,375
 relationalai/semantics/lqp/primitives.py,sha256=9Hjow-Yp06jt0xatuUrH1dw0ErnzknIr9K0TB_AwdjU,11029
 relationalai/semantics/lqp/result_helpers.py,sha256=oYpLoTBnzsiyOVIWA2rLMHlgs7P7BoEkqthQ2aMosnk,10123
@@ -322,9 +322,9 @@ relationalai/semantics/lqp/types.py,sha256=3TZ61ybwNV8lDyUMujZIWNFz3Fgn4uifsJb8E
 relationalai/semantics/lqp/utils.py,sha256=iOoS-f8kyFjrgAnpK4cWDvAA-WmPgDRggSKUXm_JdTc,6317
 relationalai/semantics/lqp/validators.py,sha256=FlKMKclHj0L71QUtl0aqKknqksSWM-di4N9bjGDJvnY,1561
 relationalai/semantics/lqp/rewrite/__init__.py,sha256=V9ERED9qdh4VvY9Ud_M8Zn8lhVANdOGIgW03l55sGj0,492
-relationalai/semantics/lqp/rewrite/annotate_constraints.py,sha256=
+relationalai/semantics/lqp/rewrite/annotate_constraints.py,sha256=b_Ly4_80dQpRzWbeLC72JVfxzhwOPBpiCdEqtBiEiwM,2310
 relationalai/semantics/lqp/rewrite/cdc.py,sha256=I6DeMOZScx-3UAVoSCMn9cuOgLzwdvJVKNwsgFa6R_k,10390
-relationalai/semantics/lqp/rewrite/extract_common.py,sha256=
+relationalai/semantics/lqp/rewrite/extract_common.py,sha256=ZRvmeYHN8JEkU-j3fRx1e0_JK-46n6NqhxtwZe6L10c,14690
 relationalai/semantics/lqp/rewrite/extract_keys.py,sha256=iSbwGQG9p8j-erknEwl2pZkunJEpXTlnL9ohr4KVS8M,19317
 relationalai/semantics/lqp/rewrite/function_annotations.py,sha256=9ZzLASvXh_OgQ04eup0AyoMIh2HxWHkoRETLm1-XtWs,4660
 relationalai/semantics/lqp/rewrite/functional_dependencies.py,sha256=4oQcVQtAGDqY850B1bNszigQopf6y9Y_CaUyWx42PtM,12718
@@ -344,9 +344,9 @@ relationalai/semantics/metamodel/util.py,sha256=cmSmeww34JVMqcFudwVAY820IPM2ETSE
 relationalai/semantics/metamodel/visitor.py,sha256=DFY0DACLhxlZ0e4p0vWqbK6ZJr_GWEvH66CU_HVuoTk,35527
 relationalai/semantics/metamodel/rewrite/__init__.py,sha256=9ONWFSdMPHkWpObDMSljt8DywhpFf4Ehsq1aT3fTPt8,344
 relationalai/semantics/metamodel/rewrite/discharge_constraints.py,sha256=0v613BqCLlo4sgWuZjcLSxxakp3d34mYWbG4ldhzGno,1949
-relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py,sha256=
+relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py,sha256=piV8FEr4GHKSDcCcsu_TLHJHrsX7blTq4TUvGpGrN8Q,7986
 relationalai/semantics/metamodel/rewrite/extract_nested_logicals.py,sha256=vQ0-7t_GORskB1ZG50KuzM4phm6YNPvehfFn3v_LbgI,3354
-relationalai/semantics/metamodel/rewrite/flatten.py,sha256=
+relationalai/semantics/metamodel/rewrite/flatten.py,sha256=CMCFrMCPIQJs9Ln8TJBJYbZaw6EpIcyWh6N6HaZlLQA,22513
 relationalai/semantics/metamodel/rewrite/format_outputs.py,sha256=n0IxC3RL3UMly6MWsq342EGfL2yGj3vOgVG_wg7kt-o,6225
 relationalai/semantics/metamodel/typer/__init__.py,sha256=E3ydmhWRdm-cAqWsNR24_Qd3NcwiHx8ElO2tzNysAXc,143
 relationalai/semantics/metamodel/typer/checker.py,sha256=frY0gilDO6skbDiYFiIpDUOWyt9s9jAJsRBs848DcG0,19184
@@ -364,7 +364,7 @@ relationalai/semantics/reasoners/optimization/solvers_dev.py,sha256=lbw3c8Z6PlHR
 relationalai/semantics/reasoners/optimization/solvers_pb.py,sha256=ESwraHU9c4NCEVRZ16tnBZsUCmJg7lUhy-v0-GGq0qo,48000
 relationalai/semantics/rel/__init__.py,sha256=pMlVTC_TbQ45mP1LpzwFBBgPxpKc0H3uJDvvDXEWzvs,55
 relationalai/semantics/rel/builtins.py,sha256=kQToiELc4NnvCmXyFtu9CsGZNdTQtSzTB-nuyIfQcsM,1562
-relationalai/semantics/rel/compiler.py,sha256
+relationalai/semantics/rel/compiler.py,sha256=pFkEbuPKVd8AI4tiklcv06LbNnK8KfoV4FwmY9Lrhqo,43044
 relationalai/semantics/rel/executor.py,sha256=v-yHl9R8AV0AA2xnm5YZDzue83pr8j2Q97Ky1MKkU70,17309
 relationalai/semantics/rel/rel.py,sha256=9I_V6dQ83QRaLzq04Tt-KjBWhmNxNO3tFzeornBK4zc,15738
 relationalai/semantics/rel/rel_utils.py,sha256=EH-NBROA4vIJXajLKniapt4Dxt7cXSqY4NEjD-wD8Mc,9566
@@ -442,8 +442,8 @@ frontend/debugger/dist/index.html,sha256=0wIQ1Pm7BclVV1wna6Mj8OmgU73B9rSEGPVX-Wo
 frontend/debugger/dist/assets/favicon-Dy0ZgA6N.png,sha256=tPXOEhOrM4tJyZVJQVBO_yFgNAlgooY38ZsjyrFstgg,620
 frontend/debugger/dist/assets/index-Cssla-O7.js,sha256=MxgIGfdKQyBWgufck1xYggQNhW5nj6BPjCF6Wleo-f0,298886
 frontend/debugger/dist/assets/index-DlHsYx1V.css,sha256=21pZtAjKCcHLFjbjfBQTF6y7QmOic-4FYaKNmwdNZVE,60141
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
+relationalai-0.12.13.dist-info/METADATA,sha256=qiiOC2n_SXtlfEHafFp8hVqY1M2VSzGOl5fqOPt1_ZQ,2563
+relationalai-0.12.13.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+relationalai-0.12.13.dist-info/entry_points.txt,sha256=fo_oLFJih3PUgYuHXsk7RnCjBm9cqRNR--ab6DgI6-0,88
+relationalai-0.12.13.dist-info/licenses/LICENSE,sha256=pPyTVXFYhirkEW9VsnHIgUjT0Vg8_xsE6olrF5SIgpc,11343
+relationalai-0.12.13.dist-info/RECORD,,
```
Files without changes:

- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/WHEEL
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/entry_points.txt
- {relationalai-0.12.11.dist-info → relationalai-0.12.13.dist-info}/licenses/LICENSE