relationalai 0.12.1__py3-none-any.whl → 0.12.2__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
- relationalai/clients/direct_access_client.py +5 -0
- relationalai/clients/snowflake.py +31 -8
- relationalai/clients/use_index_poller.py +24 -7
- relationalai/experimental/solvers.py +283 -79
- relationalai/semantics/internal/internal.py +8 -8
- relationalai/semantics/lqp/executor.py +10 -2
- relationalai/semantics/lqp/model2lqp.py +3 -2
- relationalai/semantics/rel/executor.py +11 -2
- {relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/METADATA +1 -1
- {relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/RECORD +13 -13
- {relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/WHEEL +0 -0
- {relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/entry_points.txt +0 -0
- {relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/licenses/LICENSE +0 -0

relationalai/clients/direct_access_client.py

@@ -49,6 +49,11 @@ class DirectAccessClient:
             "suspend_engine": Endpoint(method="POST", endpoint="/v1alpha1/engines/{engine_type}/{engine_name}/suspend"),
             "resume_engine": Endpoint(method="POST", endpoint="/v1alpha1/engines/{engine_type}/{engine_name}/resume_async"),
             "prepare_index": Endpoint(method="POST", endpoint="/v1alpha1/index/prepare"),
+            "get_job": Endpoint(method="GET", endpoint="/v1alpha1/jobs/{job_type}/{job_id}"),
+            "list_jobs": Endpoint(method="GET", endpoint="/v1alpha1/jobs"),
+            "get_job_events": Endpoint(method="GET", endpoint="/v1alpha1/jobs/{job_type}/{job_id}/events/{stream_name}"),
+            "create_job": Endpoint(method="POST", endpoint="/v1alpha1/jobs"),
+            "cancel_job": Endpoint(method="POST", endpoint="/v1alpha1/jobs/{job_type}/{job_id}/cancel"),
         }
         self.http_session = self._create_retry_session()
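The five new endpoint entries expose the job API through the direct-access client. Below is a minimal usage sketch; it assumes an already-configured `DirectAccessClient` instance (here called `client`) and mirrors the `request(...)` call style used elsewhere in this release, so treat it as illustrative rather than official API documentation.

```python
# Hypothetical sketch: polling a solver job through the new endpoints.
# `client` is assumed to be an already-configured DirectAccessClient.
ENGINE_TYPE_SOLVER = "SOLVER"

def fetch_job(client, job_id: str) -> dict:
    # GET /v1alpha1/jobs/{job_type}/{job_id}
    response = client.request(
        "get_job",
        path_params={"job_type": ENGINE_TYPE_SOLVER, "job_id": job_id},
    )
    if response.status_code != 200:
        raise RuntimeError(f"get_job failed with status {response.status_code}")
    return response.json()

def fetch_job_events(client, job_id: str, continuation_token: str = "") -> dict:
    # GET /v1alpha1/jobs/{job_type}/{job_id}/events/{stream_name}
    response = client.request(
        "get_job_events",
        path_params={"job_type": ENGINE_TYPE_SOLVER, "job_id": job_id, "stream_name": "progress"},
        query_params={"continuation_token": continuation_token},
    )
    return response.json()
```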

relationalai/clients/snowflake.py

@@ -761,13 +761,13 @@ Otherwise, remove it from your '{profile}' configuration profile.
         with debugging.span("create_model", name=name):
             self._exec(f"call {APP_NAME}.api.create_database('{name}', false, {debugging.gen_current_propagation_headers()});")

-    def delete_graph(self, name:str, force=False):
+    def delete_graph(self, name:str, force=False, language:str="rel"):
         prop_hdrs = debugging.gen_current_propagation_headers()
         if self.config.get("use_graph_index", USE_GRAPH_INDEX):
             keep_database = not force and self.config.get("reuse_model", True)
-            with debugging.span("release_index", name=name, keep_database=keep_database):
+            with debugging.span("release_index", name=name, keep_database=keep_database, language=language):
                 #TODO add headers to release_index
-                response = self._exec(f"call {APP_NAME}.api.release_index('{name}', OBJECT_CONSTRUCT('keep_database', {keep_database}));")
+                response = self._exec(f"call {APP_NAME}.api.release_index('{name}', OBJECT_CONSTRUCT('keep_database', {keep_database}, 'language', '{language}'));")
                 if response:
                     result = next(iter(response))
                     obj = json.loads(result["RELEASE_INDEX"])

@@ -795,6 +795,7 @@ Otherwise, remove it from your '{profile}' configuration profile.
         model: str,
         engine_name: str,
         engine_size: str | None = None,
+        language: str = "rel",
         program_span_id: str | None = None,
         headers: Dict | None = None,
     ):

@@ -805,6 +806,7 @@ Otherwise, remove it from your '{profile}' configuration profile.
             model,
             engine_name,
             engine_size,
+            language,
             program_span_id,
             headers,
             self.generation

@@ -2970,7 +2972,16 @@ class SnowflakeClient(Client):

         query_attrs_dict = json.loads(headers.get("X-Query-Attributes", "{}")) if headers else {}
         with debugging.span("poll_use_index", sources=self.resources.sources, model=model, engine=engine_name, **query_attrs_dict):
-            self.poll_use_index(
+            self.poll_use_index(
+                app_name=app_name,
+                sources=self.resources.sources,
+                model=model,
+                engine_name=engine_name,
+                engine_size=engine_size,
+                language="rel",
+                program_span_id=program_span_id,
+                headers=headers
+            )

         self.last_database_version = len(self.resources.sources)
         self._manage_packages()

@@ -2989,12 +3000,20 @@ class SnowflakeClient(Client):
         model: str,
         engine_name: str,
         engine_size: str | None = None,
+        language: str = "rel",
         program_span_id: str | None = None,
         headers: Dict | None = None,
     ):
         assert isinstance(self.resources, Resources)
         return self.resources.poll_use_index(
-            app_name,
+            app_name=app_name,
+            sources=sources,
+            model=model,
+            engine_name=engine_name,
+            engine_size=engine_size,
+            language=language,
+            program_span_id=program_span_id,
+            headers=headers
         )

@@ -3302,6 +3321,7 @@ class DirectAccessResources(Resources):
         model: str,
         engine_name: str,
         engine_size: str = "",
+        language: str = "rel",
         rai_relations: List[str] | None = None,
         pyrel_program_id: str | None = None,
         skip_pull_relations: bool = False,

@@ -3317,6 +3337,7 @@ class DirectAccessResources(Resources):
         payload = {
             "model_name": model,
             "caller_engine_name": engine_name,
+            "language": language,
             "pyrel_program_id": pyrel_program_id,
             "skip_pull_relations": skip_pull_relations,
             "rai_relations": rai_relations or [],

@@ -3342,6 +3363,7 @@ class DirectAccessResources(Resources):
         model: str,
         engine_name: str,
         engine_size: str | None = None,
+        language: str = "rel",
         program_span_id: str | None = None,
         headers: Dict | None = None,
     ):

@@ -3352,6 +3374,7 @@ class DirectAccessResources(Resources):
             model=model,
             engine_name=engine_name,
             engine_size=engine_size,
+            language=language,
             program_span_id=program_span_id,
             headers=headers,
             generation=self.generation,

@@ -3492,14 +3515,14 @@ class DirectAccessResources(Resources):
         with debugging.span("create_model", dbname=name):
             return self._create_database(name,"")

-    def delete_graph(self, name:str, force=False):
+    def delete_graph(self, name:str, force=False, language: str = "rel"):
         prop_hdrs = debugging.gen_current_propagation_headers()
         if self.config.get("use_graph_index", USE_GRAPH_INDEX):
             keep_database = not force and self.config.get("reuse_model", True)
-            with debugging.span("release_index", name=name, keep_database=keep_database):
+            with debugging.span("release_index", name=name, keep_database=keep_database, language=language):
                 response = self.request(
                     "release_index",
-                    payload={"model_name": name, "keep_database": keep_database},
+                    payload={"model_name": name, "keep_database": keep_database, "language": language},
                     headers=prop_hdrs,
                 )
                 if (
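The net effect of these hunks is that `delete_graph` and the `poll_use_index` plumbing now carry a `language` argument (defaulting to `"rel"`) down to `release_index` and `use_index`. A hedged sketch of the caller-facing change, assuming `res` is an existing `Resources` instance from `relationalai.clients.snowflake`:

```python
# Sketch only; `res` is assumed to be an existing Resources object.
res.delete_graph("my_model", force=True)                      # unchanged call, language defaults to "rel"
res.delete_graph("my_lqp_model", force=True, language="lqp")

# Internally this now issues roughly:
#   call <APP_NAME>.api.release_index('<name>',
#       OBJECT_CONSTRUCT('keep_database', <bool>, 'language', '<language>'));
```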

relationalai/clients/use_index_poller.py

@@ -161,8 +161,9 @@ class UseIndexPoller:
        model: str,
        engine_name: str,
        engine_size: Optional[str],
-
-
+        language: str = "rel",
+        program_span_id: Optional[str] = None,
+        headers: Optional[Dict] = None,
        generation: Optional[Generation] = None,
    ):
        self.res = resource

@@ -171,6 +172,7 @@ class UseIndexPoller:
        self.model = model
        self.engine_name = engine_name
        self.engine_size = engine_size or self.res.config.get_default_engine_size()
+        self.language = language
        self.program_span_id = program_span_id
        self.headers = headers or {}
        self.counter = 1

@@ -190,8 +192,8 @@ class UseIndexPoller:
        )
        current_user = self.res.get_sf_session().get_current_user()
        assert current_user is not None, "current_user must be set"
-        data_freshness = self.res.config.get_data_freshness_mins()
-        self.cache = GraphIndexCache(current_user, model, data_freshness, self.sources)
+        self.data_freshness = self.res.config.get_data_freshness_mins()
+        self.cache = GraphIndexCache(current_user, model, self.data_freshness, self.sources)
        self.sources = self.cache.choose_sources()
        # execution_id is allowed to group use_index call, which belongs to the same loop iteration
        self.execution_id = str(uuid.uuid4())

@@ -486,6 +488,8 @@ class UseIndexPoller:
            "wait_for_stream_sync": self.wait_for_stream_sync,
            "should_check_cdc": self.should_check_cdc,
            "init_engine_async": self.init_engine_async,
+            "language": self.language,
+            "data_freshness_mins": self.data_freshness,
        })

        request_headers = debugging.add_current_propagation_headers(self.headers)

@@ -853,11 +857,23 @@ class DirectUseIndexPoller(UseIndexPoller):
        model: str,
        engine_name: str,
        engine_size: Optional[str],
-
-
+        language: str = "rel",
+        program_span_id: Optional[str] = None,
+        headers: Optional[Dict] = None,
        generation: Optional[Generation] = None,
    ):
-        super().__init__(
+        super().__init__(
+            resource=resource,
+            app_name=app_name,
+            sources=sources,
+            model=model,
+            engine_name=engine_name,
+            engine_size=engine_size,
+            language=language,
+            program_span_id=program_span_id,
+            headers=headers,
+            generation=generation,
+        )
        from relationalai.clients.snowflake import DirectAccessResources
        self.res: DirectAccessResources = cast(DirectAccessResources, self.res)

@@ -878,6 +894,7 @@ class DirectUseIndexPoller(UseIndexPoller):
            model=self.model,
            engine_name=self.engine_name,
            engine_size=self.engine_size,
+            language=self.language,
            rai_relations=[],
            pyrel_program_id=self.program_span_id,
            skip_pull_relations=True,
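In short, `UseIndexPoller` now remembers the requested `language` and the configured data-freshness window and forwards both in the `use_index` arguments. A hedged construction example follows; the surrounding objects (`resources`, `sources`, `generation`) and the `app_name` value are assumptions, with argument names taken from the constructor shown in the diff above.

```python
# Illustrative only; not a documented public entry point.
poller = UseIndexPoller(
    resource=resources,
    app_name="relationalai",
    sources=sources,
    model="my_model",
    engine_name="my_engine",
    engine_size=None,      # falls back to the configured default engine size
    language="rel",        # new in 0.12.2; ends up in the use_index payload
    program_span_id=None,
    headers=None,
    generation=generation,
)
```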

relationalai/experimental/solvers.py

@@ -9,11 +9,13 @@ from ..metamodel import Builtins
 from ..tools.cli_controls import Spinner
 from ..tools.constants import DEFAULT_QUERY_TIMEOUT_MINS
 from .. import debugging
+from .. errors import ResponseStatusException
 import uuid
 import relationalai
 import json
 from ..clients.util import poll_with_specified_overhead
 from ..clients.snowflake import Resources as SnowflakeResources
+from ..clients.snowflake import DirectAccessClient, DirectAccessResources
 from ..util.timeout import calc_remaining_timeout_minutes

 rel_sv = rel._tagged(Builtins.SingleValued)

@@ -23,7 +25,8 @@ APP_NAME = relationalai.clients.snowflake.APP_NAME
 ENGINE_TYPE_SOLVER = "SOLVER"
 # TODO (dba) The ERP still uses `worker` instead of `engine`. Change
 # this once we fix this in the ERP.
-
+WORKER_ERRORS = ["worker is suspended", "create/resume", "worker not found", "no workers found", "worker was deleted"]
+ENGINE_ERRORS = ["engine is suspended", "create/resume", "engine not found", "no engines found", "engine was deleted"]
 ENGINE_NOT_READY_MSGS = ["worker is in pending", "worker is provisioning", "worker is not ready to accept jobs"]

 # --------------------------------------------------

@@ -213,13 +216,6 @@ class SolverModel:
        config_file_path = getattr(rai_config, 'file_path', None)
        start_time = time.monotonic()
        remaining_timeout_minutes = query_timeout_mins
-        # 1. Materialize the model and store it.
-        # TODO(coey) Currently we must run a dummy query to install the pyrel rules in a separate txn
-        # to the solve_output updates. Ideally pyrel would offer an option to flush the rules separately.
-        self.graph.exec_raw("", query_timeout_mins=remaining_timeout_minutes)
-        remaining_timeout_minutes = calc_remaining_timeout_minutes(
-            start_time, query_timeout_mins, config_file_path=config_file_path,
-        )
        response = self.graph.exec_raw(
            textwrap.dedent(f"""
            @inline

@@ -269,7 +265,9 @@ class SolverModel:
            job_id = solver._exec_job(payload, log_to_console=log_to_console, query_timeout_mins=remaining_timeout_minutes)
        except Exception as e:
            err_message = str(e).lower()
-            if
+            if isinstance(e, ResponseStatusException):
+                err_message = e.response.json().get("message", "")
+            if any(kw in err_message.lower() for kw in ENGINE_ERRORS + WORKER_ERRORS + ENGINE_NOT_READY_MSGS):
                solver._auto_create_solver_async()
                remaining_timeout_minutes = calc_remaining_timeout_minutes(
                    start_time, query_timeout_mins, config_file_path=config_file_path

@@ -553,7 +551,11 @@ class Solver:
        # may configure each individual solver.
        self.engine_settings = settings

-
+        # Optimistically set the engine object to a `READY` engine to
+        # avoid checking the engine status on each execution.
+        self.engine:Optional[dict[str,Any]] = {"name": engine_name, "state": "READY"}
+
+        return None

    # --------------------------------------------------
    # Helper

@@ -572,6 +574,7 @@ class Solver:
        assert len(engines) == 1 or len(engines) == 0
        if len(engines) != 0:
            engine = engines[0]
+
        if engine:
            # TODO (dba) Logic engines support altering the
            # auto_suspend_mins setting. Currently, we don't have

@@ -653,31 +656,20 @@ class Solver:

        self.engine = engine

-    def _exec_job_async(self, payload, query_timeout_mins: Optional[int]=None):
-        payload_json = json.dumps(payload)
-        engine_name = self.engine["name"]
-        if query_timeout_mins is None and (timeout_value := self.rai_config.get("query_timeout_mins", DEFAULT_QUERY_TIMEOUT_MINS)) is not None:
-            query_timeout_mins = int(timeout_value)
-        if query_timeout_mins is not None:
-            sql_string = textwrap.dedent(f"""
-                CALL {APP_NAME}.experimental.exec_job_async('{ENGINE_TYPE_SOLVER}', '{engine_name}', '{payload_json}', null, {query_timeout_mins})
-            """)
-        else:
-            sql_string = textwrap.dedent(f"""
-                CALL {APP_NAME}.experimental.exec_job_async('{ENGINE_TYPE_SOLVER}', '{engine_name}', '{payload_json}')
-            """)
-        res = self.provider.resources._exec(sql_string)
-        return res[0]["ID"]
-
    def _exec_job(self, payload, log_to_console=True, query_timeout_mins: Optional[int]=None):
+        if self.engine is None:
+            raise Exception("Engine not initialized.")
+
        # Make sure the engine is ready.
        if self.engine["state"] != "READY":
            poll_with_specified_overhead(lambda: self._is_solver_ready(), 0.1)

        with debugging.span("job") as job_span:
-            job_id = self.
+            job_id = self.provider.create_job_async(self.engine["name"], payload, query_timeout_mins=query_timeout_mins)
            job_span["job_id"] = job_id
            debugging.event("job_created", job_span, job_id=job_id, engine_name=self.engine["name"], job_type=ENGINE_TYPE_SOLVER)
+        if not isinstance(job_id, str):
+            job_id = ""
        polling_state = PollingState(job_id, "", False, log_to_console)

        try:

@@ -693,7 +685,14 @@ class Solver:
        return job_id

    def _is_solver_ready(self):
+        if self.engine is None:
+            raise Exception("Engine not initialized.")
+
        result = self.provider.get_solver(self.engine["name"])
+
+        if result is None:
+            raise Exception("No engine available.")
+
        self.engine = result
        state = result["state"]
        if state != "READY" and state != "PENDING":

@@ -711,20 +710,11 @@ class Solver:

        return status == "COMPLETED" or status == "FAILED" or status == "CANCELED"

-    def _get_job_events(self, job_id: str, continuation_token: str = ""):
-        results = self.provider.resources._exec(
-            f"SELECT {APP_NAME}.experimental.get_job_events('{ENGINE_TYPE_SOLVER}', '{job_id}', '{continuation_token}');"
-        )
-        if not results:
-            return {"events": [], "continuation_token": None}
-        row = results[0][0]
-        return json.loads(row)
-
    def _print_solver_logs(self, state: PollingState):
        if state.is_done:
            return

-        resp = self.
+        resp = self.provider.get_job_events(state.job_id, state.continuation_token)

        # Print solver logs to stdout.
        for event in resp["events"]:

@@ -754,7 +744,12 @@ class Provider:
        resources = relationalai.Resources()
        if not isinstance(resources, relationalai.clients.snowflake.Resources):
            raise Exception("Solvers are only supported on SPCS.")
+
        self.resources = resources
+        self.direct_access_client: Optional[DirectAccessClient] = None
+
+        if isinstance(self.resources, DirectAccessResources):
+            self.direct_access_client = self.resources.direct_access_client

    def create_solver(
        self,

@@ -770,75 +765,285 @@ class Provider:
        engine_config: dict[str, Any] = {"settings": settings}
        if auto_suspend_mins is not None:
            engine_config["auto_suspend_mins"] = auto_suspend_mins
-        self.resources.
-            f"CALL {APP_NAME}.experimental.create_engine('{ENGINE_TYPE_SOLVER}', '{name}', '{size}', {engine_config});"
+        self.resources._exec_sql(
+            f"CALL {APP_NAME}.experimental.create_engine('{ENGINE_TYPE_SOLVER}', '{name}', '{size}', {engine_config});", None
        )

    def create_solver_async(
        self,
        name: str,
        size: str | None = None,
-        settings: dict =
+        settings: dict | None = None,
        auto_suspend_mins: int | None = None,
    ):
        if size is None:
            size = "HIGHMEM_X64_S"
-
-
-
-
-
-
-
-
+
+        if self.direct_access_client is not None:
+            payload:dict[str, Any] = {
+                "name": name,
+                "settings": settings,
+            }
+            if auto_suspend_mins is not None:
+                payload["auto_suspend_mins"] = auto_suspend_mins
+            if size is not None:
+                payload["size"] = size
+            response = self.direct_access_client.request(
+                "create_engine",
+                payload=payload,
+                path_params={"engine_type": "solver"},
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException(
+                    f"Failed to create engine {name} with size {size}.", response
+                )
+        else:
+            engine_config: dict[str, Any] = {}
+            if settings is not None:
+                engine_config["settings"] = settings
+            if auto_suspend_mins is not None:
+                engine_config["auto_suspend_mins"] = auto_suspend_mins
+            self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.create_engine_async('{ENGINE_TYPE_SOLVER}', '{name}', '{size}', {engine_config});",
+                None
+            )

    def delete_solver(self, name: str):
-        self.
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "delete_engine", path_params = {"engine_type": ENGINE_TYPE_SOLVER, "engine_name": name}
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to delete engine.", response)
+            return None
+        else:
+            self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.delete_engine('{ENGINE_TYPE_SOLVER}', '{name}');",
+                None
+            )

    def resume_solver_async(self, name: str):
-        self.
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "resume_engine", path_params = {"engine_type": ENGINE_TYPE_SOLVER, "engine_name": name}
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to resume engine.", response)
+            return None
+        else:
+            self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.resume_engine_async('{ENGINE_TYPE_SOLVER}', '{name}');",
+                None
+            )
+            return None

    def get_solver(self, name: str):
-
-
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "get_engine", path_params = {"engine_type": ENGINE_TYPE_SOLVER, "engine_name": name}
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to get engine.", response)
+            solver = response.json()
+            if not solver :
+                return None
+            solver_state = {
+                "name": solver["name"],
+                "id": solver["id"],
+                "size": solver["size"],
+                "state": solver["status"], # callers are expecting 'state'
+                "created_by": solver["created_by"],
+                "created_on": solver["created_on"],
+                "updated_on": solver["updated_on"],
+                "version": solver["version"],
+                "auto_suspend": solver["auto_suspend_mins"],
+                "suspends_at": solver["suspends_at"],
+                "solvers": []
+                if solver["settings"] == ""
+                else [
+                    k
+                    for (k,v) in json.loads(solver["settings"]).items()
+                    if isinstance(v, dict) and v.get("enabled", False)
+                ],
+            }
+            return solver_state
+        else:
+            results = self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.get_engine('{ENGINE_TYPE_SOLVER}', '{name}');",
+                None
+            )
+            return solver_list_to_dicts(results)[0]

    def list_solvers(self, state: str | None = None):
-
-
-
-
-
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "list_engines"
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to list engines.", response)
+            response_content = response.json()
+            if not response_content:
+                return []
+            engines = [
+                {
+                    "name": engine["name"],
+                    "id": engine["id"],
+                    "size": engine["size"],
+                    "state": engine["status"], # callers are expecting 'state'
+                    "created_by": engine["created_by"],
+                    "created_on": engine["created_on"],
+                    "updated_on": engine["updated_on"],
+                    "auto_suspend_mins": engine["auto_suspend_mins"],
+                    "solvers": []
+                    if engine["settings"] == ""
+                    else [
+                        k
+                        for (k, v) in json.loads(engine["settings"]).items()
+                        if isinstance(v, dict) and v.get("enabled", False)
+                    ],
+                }
+                for engine in response_content.get("engines", [])
+                if (state is None or engine.get("status") == state) and (engine.get("type") == ENGINE_TYPE_SOLVER)
+            ]
+            return sorted(engines, key=lambda x: x["name"])
+        else:
+            where_clause = f"WHERE TYPE='{ENGINE_TYPE_SOLVER}'"
+            where_clause = (
+                f"{where_clause} AND STATUS = '{state.upper()}'" if state else where_clause
+            )
+            statement = f"SELECT NAME,ID,SIZE,STATUS,CREATED_BY,CREATED_ON,UPDATED_ON,AUTO_SUSPEND_MINS,SETTINGS FROM {APP_NAME}.experimental.engines {where_clause};"
+            results = self.resources._exec_sql(statement, None)
+            return solver_list_to_dicts(results)

    # --------------------------------------------------
    # Job API
    # --------------------------------------------------

+    def create_job_async(self, engine_name, payload, query_timeout_mins: Optional[int]=None):
+        payload_json = json.dumps(payload)
+
+        if query_timeout_mins is None and (timeout_value := self.resources.config.get("query_timeout_mins", DEFAULT_QUERY_TIMEOUT_MINS)) is not None:
+            query_timeout_mins = int(timeout_value)
+
+        if self.direct_access_client is not None:
+            job = {
+                "job_type":ENGINE_TYPE_SOLVER,
+                "worker_name": engine_name,
+                "timeout_mins": query_timeout_mins,
+                "payload": payload_json,
+            }
+            response = self.direct_access_client.request(
+                "create_job",
+                payload=job,
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to create job.", response)
+            response_content = response.json()
+            return response_content["id"]
+        else:
+            if query_timeout_mins is not None:
+                sql_string = textwrap.dedent(f"""
+                    CALL {APP_NAME}.experimental.exec_job_async('{ENGINE_TYPE_SOLVER}', '{engine_name}', '{payload_json}', null, {query_timeout_mins})
+                """)
+            else:
+                sql_string = textwrap.dedent(f"""
+                    CALL {APP_NAME}.experimental.exec_job_async('{ENGINE_TYPE_SOLVER}', '{engine_name}', '{payload_json}')
+                """)
+            res = self.resources._exec_sql(sql_string, None)
+            return res[0]["ID"]
+
    def list_jobs(self, state=None, limit=None):
-
-
-
-
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "list_jobs"
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to list jobs.", response)
+            response_content = response.json()
+            if not response_content:
+                return []
+            jobs = [
+                {
+                    "id": job["id"],
+                    "state": job["state"],
+                    "created_by": job["created_by"],
+                    "created_on": job["created_on"],
+                    "finished_at": job.get("finished_at", None),
+                    "duration": job["duration"] if "duration" in job else 0,
+                    "solver": json.loads(job["payload"]).get("solver", ""),
+                    "engine": job.get("engine_name", job["worker_name"]),
+                }
+                for job in response_content.get("jobs", [])
+                if state is None or job.get("state") == state
+            ]
+            return sorted(jobs, key=lambda x: x["created_on"], reverse=True)
+        else:
+            state_clause = f"AND STATE = '{state.upper()}'" if state else ""
+            limit_clause = f"LIMIT {limit}" if limit else ""
+            results = self.resources._exec_sql(
+                f"SELECT ID,STATE,CREATED_BY,CREATED_ON,FINISHED_AT,DURATION,PAYLOAD,ENGINE_NAME FROM {APP_NAME}.experimental.jobs where type='{ENGINE_TYPE_SOLVER}' {state_clause} ORDER BY created_on DESC {limit_clause};",
+                None
+            )
+            return job_list_to_dicts(results)

    def get_job(self, id: str):
-
-
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "get_job", path_params = {"job_type": ENGINE_TYPE_SOLVER, "job_id": id}
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to get job.", response)
+            response_content = response.json()
+            return response_content["job"]
+        else:
+            results = self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.get_job('{ENGINE_TYPE_SOLVER}', '{id}');",
+                None
+            )
+            return job_list_to_dicts(results)[0]
+
+    def get_job_events(self, job_id: str, continuation_token: str = ""):
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "get_job_events",
+                path_params = {"job_type": ENGINE_TYPE_SOLVER, "job_id": job_id, "stream_name": "progress"},
+                query_params={"continuation_token": continuation_token},
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to get job events.", response)
+            response_content = response.json()
+            if not response_content:
+                return {
+                    "events": [],
+                    "continuation_token": None
+                }
+            return response_content
+        else:
+            results = self.resources._exec_sql(
+                f"SELECT {APP_NAME}.experimental.get_job_events('{ENGINE_TYPE_SOLVER}', '{job_id}', '{continuation_token}');",
+                None
+            )
+            if not results:
+                return {"events": [], "continuation_token": None}
+            row = results[0][0]
+            if not isinstance(row, str):
+                row = ""
+            return json.loads(row)

    def cancel_job(self, id: str):
-        self.
-
-
+        if self.direct_access_client is not None:
+            response = self.direct_access_client.request(
+                "cancel_job", path_params = {"job_type": ENGINE_TYPE_SOLVER, "job_id": id}
+            )
+            if response.status_code != 200:
+                raise ResponseStatusException("Failed to cancel job.", response)
+            return None
+        else:
+            self.resources._exec_sql(
+                f"CALL {APP_NAME}.experimental.cancel_job('{ENGINE_TYPE_SOLVER}', '{id}');",
+                None
+            )
+            return None


 def solver_list_to_dicts(results):

@@ -865,7 +1070,6 @@ def solver_list_to_dicts(results):
        for row in results
    ]

-
 def job_list_to_dicts(results):
    if not results:
        return []
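The bulk of this file's change is that every `Provider` engine/job operation now branches on whether a direct-access client is available: it uses the HTTP job/engine endpoints when one exists and falls back to the `{APP_NAME}.experimental.*` SQL procedures otherwise. Below is a toy sketch of that dispatch pattern only; it is not the actual class, and `ResponseStatusException` is replaced by a plain `RuntimeError` to keep it self-contained.

```python
from typing import Any, Optional

class JobOps:
    """Illustrative only: prefer the direct-access HTTP API, fall back to SQL procedures."""

    def __init__(self, resources: Any, direct_access_client: Optional[Any] = None):
        self.resources = resources
        self.direct_access_client = direct_access_client

    def cancel_job(self, job_id: str) -> None:
        if self.direct_access_client is not None:
            # HTTP path: POST /v1alpha1/jobs/{job_type}/{job_id}/cancel
            response = self.direct_access_client.request(
                "cancel_job",
                path_params={"job_type": "SOLVER", "job_id": job_id},
            )
            if response.status_code != 200:
                raise RuntimeError(f"Failed to cancel job {job_id}.")
        else:
            # SQL fallback, mirroring the experimental stored-procedure call.
            self.resources._exec_sql(
                f"CALL <app>.experimental.cancel_job('SOLVER', '{job_id}');", None
            )
```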

relationalai/semantics/internal/internal.py

@@ -346,7 +346,7 @@ def find_select_keys(item: Any, keys:OrderedSet[Key]|None = None, enable_primiti

    if isinstance(item, (list, tuple)):
        for it in item:
-            find_select_keys(it, keys)
+            find_select_keys(it, keys, enable_primitive_key=enable_primitive_key)

    elif isinstance(item, (Relationship, RelationshipReading)) and item._parent:
        find_select_keys(item._parent, keys)

@@ -390,7 +390,7 @@ def find_select_keys(item: Any, keys:OrderedSet[Key]|None = None, enable_primiti
        find_select_keys(item._arg, keys)

    elif isinstance(item, Alias):
-        find_select_keys(item._thing, keys)
+        find_select_keys(item._thing, keys, enable_primitive_key=enable_primitive_key)

    elif isinstance(item, Aggregate):
        keys.update( Key(k, True) for k in item._group )

@@ -2418,21 +2418,21 @@ class Fragment():

    def meta(self, **kwargs: Any) -> Fragment:
        """Add metadata to the query.
-
+
        Metadata can be used for debugging and observability purposes.
-
+
        Args:
            **kwargs: Metadata key-value pairs
-
+
        Returns:
            Fragment: Returns self for method chaining
-
+
        Example:
            select(Person.name).meta(workload_name="test", priority=1, enabled=True)
        """
        if not kwargs:
            raise ValueError("meta() requires at least one argument")
-
+
        self._meta.update(kwargs)
        return self

@@ -2560,7 +2560,7 @@ class Fragment():
        with debugging.span("query", dsl=str(clone), **with_source(clone), meta=clone._meta):
            query_task = qb_model._compiler.fragment(clone)
            qb_model._to_executor().execute(ir_model, query_task, result_cols=result_cols, export_to=table._fqn, update=update, meta=clone._meta)
-
+

 #--------------------------------------------------
 # Select / Where
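The two one-line fixes forward `enable_primitive_key` through the recursive calls, so nested lists and aliases honor the flag instead of silently reverting to the default. A generic toy example of this class of bug (not the real `find_select_keys`):

```python
# Toy illustration: a keyword flag must be threaded through recursive calls.
def collect(item, out=None, *, keep_primitives=False):
    out = [] if out is None else out
    if isinstance(item, (list, tuple)):
        for it in item:
            # Before the fix the flag was dropped here; now it is passed along.
            collect(it, out, keep_primitives=keep_primitives)
    elif keep_primitives and isinstance(item, (int, float, str)):
        out.append(item)
    return out

assert collect([1, [2, "x"]], keep_primitives=True) == [1, 2, "x"]
assert collect([1, [2, "x"]]) == []
```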

relationalai/semantics/lqp/executor.py

@@ -60,7 +60,7 @@ class LQPExecutor(e.Executor):
        if not self.dry_run:
            self.engine = self._resources.get_default_engine_name()
            if not self.keep_model:
-                atexit.register(self._resources.delete_graph, self.database, True)
+                atexit.register(self._resources.delete_graph, self.database, True, "lqp")
        return self._resources

    # Checks the graph index and updates it if necessary

@@ -88,7 +88,15 @@ class LQPExecutor(e.Executor):
        assert self.engine is not None

        with debugging.span("poll_use_index", sources=sources, model=model, engine=engine_name):
-            resources.poll_use_index(
+            resources.poll_use_index(
+                app_name=app_name,
+                sources=sources,
+                model=model,
+                engine_name=self.engine,
+                engine_size=engine_size,
+                language="lqp",
+                program_span_id=program_span_id,
+            )

    def report_errors(self, problems: list[dict[str, Any]], abort_on_error=True):
        from relationalai import errors

relationalai/semantics/lqp/model2lqp.py

@@ -580,8 +580,9 @@ def get_relation_id(ctx: TranslationCtx, relation: ir.Relation, projection: list
    if relation.id in ctx.def_names.id_to_name:
        unique_name = ctx.def_names.id_to_name[relation.id]
    else:
-
-
+        name = helpers.relation_name_prefix(relation) + relation.name
+        name = helpers.sanitize(name)
+        unique_name = ctx.def_names.get_name_by_id(relation.id, name)

    return utils.gen_rel_id(ctx, unique_name, types)

relationalai/semantics/rel/executor.py

@@ -58,7 +58,7 @@ class RelExecutor(e.Executor):
        if not self.dry_run:
            self.engine = self._resources.get_default_engine_name()
            if not self.keep_model:
-                atexit.register(self._resources.delete_graph, self.database, True)
+                atexit.register(self._resources.delete_graph, self.database, True, "rel")
        return self._resources

    def check_graph_index(self, headers: dict[str, Any] | None = None):

@@ -85,7 +85,16 @@ class RelExecutor(e.Executor):
        assert self.engine is not None

        with debugging.span("poll_use_index", sources=sources, model=model, engine=engine_name):
-            resources.poll_use_index(
+            resources.poll_use_index(
+                app_name=app_name,
+                sources=sources,
+                model=model,
+                engine_name=self.engine,
+                engine_size=engine_size,
+                language="rel",
+                program_span_id=program_span_id,
+                headers=headers,
+            )

    def report_errors(self, problems: list[dict[str, Any]], abort_on_error=True):
        from relationalai import errors
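Both executors now register the cleanup with an explicit language argument, and `atexit.register` binds extra positional arguments at registration time, so the exit-time call is effectively `delete_graph(database, True, "rel")` (or `"lqp"` for the LQP executor). A small self-contained illustration of that binding behaviour, with a stand-in `delete_graph`:

```python
import atexit

def delete_graph(name: str, force: bool = False, language: str = "rel") -> None:
    # Stand-in for Resources.delete_graph; just shows which arguments arrive at exit.
    print(f"releasing index for {name!r} (force={force}, language={language})")

# Extra positional arguments are stored and replayed when the interpreter exits.
atexit.register(delete_graph, "my_model", True, "rel")
```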

{relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/RECORD

@@ -23,14 +23,14 @@ relationalai/clients/azure.py,sha256=6tYHxKVN2fHMIDV7J_EHZD9WntkYh2IreMRMqlq1Bhg
 relationalai/clients/cache_store.py,sha256=A-qd11wcwN3TkIqvlN0_iFUU3aEjJal3T2pqFBwkkzQ,3966
 relationalai/clients/client.py,sha256=4SSunUwuFEcRFXOPYotpSLDPr0CKuwJ4335W0DesR90,35792
 relationalai/clients/config.py,sha256=hERaKjc3l4kd-kf0l-NUOHrWunCn8gmFWpuE0j3ScJg,24457
-relationalai/clients/direct_access_client.py,sha256=
+relationalai/clients/direct_access_client.py,sha256=VGjQ7wzduxCo04BkxSZjlPAgqK-aBc32zIXcMfAzzSU,6436
 relationalai/clients/export_procedure.py.jinja,sha256=nhvVcs5hQyWExFDuROQbi1VyYzOCa_ZIRPR2KzZwDtI,10582
 relationalai/clients/hash_util.py,sha256=pZVR1FX3q4G_19p_r6wpIR2tIM8_WUlfAR7AVZJjIYM,1495
 relationalai/clients/profile_polling.py,sha256=pUH7WKH4nYDD0SlQtg3wsWdj0K7qt6nZqUw8jTthCBs,2565
 relationalai/clients/result_helpers.py,sha256=wDSD02Ngx6W-YQqBIGKnpXD4Ju3pA1e9Nz6ORRI6SRI,17808
-relationalai/clients/snowflake.py,sha256=
+relationalai/clients/snowflake.py,sha256=c5z7sh9wXeMaAm0JiyrOZlHJFK_hbS76ml8Ed6kWxKo,159579
 relationalai/clients/types.py,sha256=eNo6akcMTbnBFbBbHd5IgVeY-zuAgtXlOs8Bo1SWmVU,2890
-relationalai/clients/use_index_poller.py,sha256=
+relationalai/clients/use_index_poller.py,sha256=gxQDK-iaA9iz5-rkNWQjptZSxgdwv8WXIKC4HB44cHo,46291
 relationalai/clients/util.py,sha256=NJC8fnrWHR01NydwESPSetIHRWf7jQJURYpaWJjmDyE,12311
 relationalai/early_access/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 relationalai/early_access/builder/__init__.py,sha256=mrR-aGLPquUGc_e9-DOkVCCCo8QRE5A56GTTtgILNa4,993

@@ -226,7 +226,7 @@ relationalai/experimental/SF.py,sha256=cLIPCgTa6yCkmAU4zw3o2OlSM_Sl8ah3JUpHVoHzr
 relationalai/experimental/__init__.py,sha256=alaOJulJ9YHRl4WGyQ3Kzxhrpc2gP542oNheWXzs7NQ,74
 relationalai/experimental/graphs.py,sha256=vJQ_8ZF_Q2pL-w1xI4O22VGqbnTDimkkqTNqqIkGL7c,3521
 relationalai/experimental/inspect.py,sha256=jpk_xl_bWRcX96xVrHsex-YE3sTqa-W2b5IjGXoxeWk,1635
-relationalai/experimental/solvers.py,sha256=
+relationalai/experimental/solvers.py,sha256=aD2O3ncmKVBgc4R6SKeLmee6FPEpC_2Xaxgc5Lh5t4s,43328
 relationalai/experimental/pathfinder/__init__.py,sha256=wzF7zzl5Cvp2CkFaPpY2VU-Tg7I03wvxGq7kp8FVvU0,5429
 relationalai/experimental/pathfinder/api.py,sha256=HT6PAt5gszgJhbnqsHACzZrr9ps9kwcHYu9Nf2EYtTA,4235
 relationalai/experimental/pathfinder/automaton.py,sha256=Z_TYjf5j89-48rxjxl0jV9Q0BR4VhV9cKRTFES7c1t0,21898

@@ -256,16 +256,16 @@ relationalai/semantics/devtools/compilation_manager.py,sha256=XBqG_nYWtK3s_J6MeC
 relationalai/semantics/devtools/extract_lqp.py,sha256=gxI3EvPUTPAkwgnkCKAkEm2vA6QkLfoM8AXXiVz0c34,3696
 relationalai/semantics/internal/__init__.py,sha256=JXrpFaL-fdZrvKpWTEn1UoLXITOoTGnAYwmgeiglhSk,774
 relationalai/semantics/internal/annotations.py,sha256=P_nRpBm4wLmE_8L0VO3TDORL1p2flXaPOVDC0GG7KsQ,306
-relationalai/semantics/internal/internal.py,sha256=
+relationalai/semantics/internal/internal.py,sha256=Vh4LFatRA8xO6sIBxRSlbAv6HOMJPU65t2c_ZEh9lug,148972
 relationalai/semantics/internal/snowflake.py,sha256=DR6rgbPawen2ZOFegRuPkXt8aQUfciiCYzDI74u1Iwc,13036
 relationalai/semantics/lqp/__init__.py,sha256=XgcQZxK-zz_LqPDVtwREhsIvjTuUIt4BZhIedCeMY-s,48
 relationalai/semantics/lqp/builtins.py,sha256=bRmQ6fdceWU-4xf4l-W-YiuyDxJTPey1s6O4xlyW6iM,540
 relationalai/semantics/lqp/compiler.py,sha256=Nury1gPw_-Oi_mqT1-rhr13L4UmyIP2BGuotbuklQKA,949
 relationalai/semantics/lqp/constructors.py,sha256=8U4eUL8-m1wYRQnws_YWC1coGquTugVH5YC0Zek6VT8,2309
-relationalai/semantics/lqp/executor.py,sha256=
+relationalai/semantics/lqp/executor.py,sha256=j87LJFnzzNKHrazoYj0X1LK1jyy9ysDCKZ0ExhN1_xs,19643
 relationalai/semantics/lqp/intrinsics.py,sha256=Pb1mLIme7Q-5Y-CVacUOEvapfhKs076bgtRNi3f0ayY,833
 relationalai/semantics/lqp/ir.py,sha256=DUw0ltul0AS9CRjntNlmllWTwXpxMyYg4iJ9t7NFYMA,1791
-relationalai/semantics/lqp/model2lqp.py,sha256=
+relationalai/semantics/lqp/model2lqp.py,sha256=M---RMVLEollI4aGHFh-2klR4y6C88vPKCHFd6tPNz8,31753
 relationalai/semantics/lqp/passes.py,sha256=nLppoHvIQkGP6VuG56OAZ1oOrYhEqpR_0w91gfJ7t_s,27540
 relationalai/semantics/lqp/pragmas.py,sha256=FzzldrJEAZ1AIcEw6D-FfaVg3CoahRYgPCFo7xHfg1g,375
 relationalai/semantics/lqp/primitives.py,sha256=Gbh6cohoAArhqEJTN_TgIRc7wmtdxXt231NRW0beEj0,10898

@@ -376,7 +376,7 @@ relationalai/semantics/reasoners/optimization/solvers_pb.py,sha256=ryNARpyph3uvr
 relationalai/semantics/rel/__init__.py,sha256=pMlVTC_TbQ45mP1LpzwFBBgPxpKc0H3uJDvvDXEWzvs,55
 relationalai/semantics/rel/builtins.py,sha256=qu4yZvLovn4Vn2x44D4XugqGD6Qo5xLxj_RKA34cpF4,1527
 relationalai/semantics/rel/compiler.py,sha256=hiLIaZzhVU5VMtU6rdo3tH2pmKMEtXvh6AHh94CvRAg,42203
-relationalai/semantics/rel/executor.py,sha256=
+relationalai/semantics/rel/executor.py,sha256=mRBNyw6qHgmF3aOnM4aTIxBHCn4yaho4nQlMYM08pIA,15883
 relationalai/semantics/rel/rel.py,sha256=9I_V6dQ83QRaLzq04Tt-KjBWhmNxNO3tFzeornBK4zc,15738
 relationalai/semantics/rel/rel_utils.py,sha256=F14Ch8mn45J8SmM7HZnIHUNqDnb3WQLnkEGLme04iBk,9386
 relationalai/semantics/snowflake/__init__.py,sha256=BW_zvPQBWGTAtY6cluG6tDDG-QmU_jRb-F7PeCpDhIU,134

@@ -453,8 +453,8 @@ frontend/debugger/dist/index.html,sha256=0wIQ1Pm7BclVV1wna6Mj8OmgU73B9rSEGPVX-Wo
 frontend/debugger/dist/assets/favicon-Dy0ZgA6N.png,sha256=tPXOEhOrM4tJyZVJQVBO_yFgNAlgooY38ZsjyrFstgg,620
 frontend/debugger/dist/assets/index-Cssla-O7.js,sha256=MxgIGfdKQyBWgufck1xYggQNhW5nj6BPjCF6Wleo-f0,298886
 frontend/debugger/dist/assets/index-DlHsYx1V.css,sha256=21pZtAjKCcHLFjbjfBQTF6y7QmOic-4FYaKNmwdNZVE,60141
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
-relationalai-0.12.
+relationalai-0.12.2.dist-info/METADATA,sha256=XPjRexFpG92ZfObHk7CAwEtVSR7Epw2HkPnwnZACiFo,2562
+relationalai-0.12.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+relationalai-0.12.2.dist-info/entry_points.txt,sha256=fo_oLFJih3PUgYuHXsk7RnCjBm9cqRNR--ab6DgI6-0,88
+relationalai-0.12.2.dist-info/licenses/LICENSE,sha256=pPyTVXFYhirkEW9VsnHIgUjT0Vg8_xsE6olrF5SIgpc,11343
+relationalai-0.12.2.dist-info/RECORD,,

{relationalai-0.12.1.dist-info → relationalai-0.12.2.dist-info}/WHEEL, entry_points.txt, and licenses/LICENSE: files without changes.