MindsDB 25.5.4.0__py3-none-any.whl → 25.5.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of MindsDB might be problematic.
- mindsdb/__about__.py +8 -8
- mindsdb/api/a2a/__main__.py +38 -8
- mindsdb/api/a2a/run_a2a.py +10 -53
- mindsdb/api/a2a/task_manager.py +19 -53
- mindsdb/api/executor/command_executor.py +147 -291
- mindsdb/api/http/namespaces/config.py +61 -86
- mindsdb/integrations/handlers/byom_handler/requirements.txt +1 -2
- mindsdb/integrations/handlers/lancedb_handler/requirements.txt +0 -1
- mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +37 -20
- mindsdb/integrations/libs/llm/config.py +13 -0
- mindsdb/integrations/libs/llm/utils.py +37 -65
- mindsdb/integrations/utilities/rag/rerankers/base_reranker.py +230 -227
- mindsdb/interfaces/agents/constants.py +17 -13
- mindsdb/interfaces/agents/langchain_agent.py +93 -94
- mindsdb/interfaces/knowledge_base/controller.py +230 -221
- mindsdb/utilities/config.py +43 -84
- mindsdb/utilities/starters.py +9 -1
- {mindsdb-25.5.4.0.dist-info → mindsdb-25.5.4.2.dist-info}/METADATA +268 -266
- {mindsdb-25.5.4.0.dist-info → mindsdb-25.5.4.2.dist-info}/RECORD +22 -26
- mindsdb/api/a2a/a2a_client.py +0 -439
- mindsdb/api/a2a/common/client/__init__.py +0 -4
- mindsdb/api/a2a/common/client/card_resolver.py +0 -21
- mindsdb/api/a2a/common/client/client.py +0 -86
- {mindsdb-25.5.4.0.dist-info → mindsdb-25.5.4.2.dist-info}/WHEEL +0 -0
- {mindsdb-25.5.4.0.dist-info → mindsdb-25.5.4.2.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.5.4.0.dist-info → mindsdb-25.5.4.2.dist-info}/top_level.txt +0 -0
mindsdb/api/executor/command_executor.py

@@ -65,7 +65,7 @@ from mindsdb_sql_parser.ast.mindsdb import (
     RetrainPredictor,
     UpdateAgent,
     UpdateChatBot,
-    UpdateSkill
+    UpdateSkill,
 )

 import mindsdb.utilities.profiler as profiler

@@ -140,18 +140,12 @@ def _get_show_where(
             )
         )
     if statement.like is not None and like_name is not None:
-        where.append(
-            BinaryOperation(
-                "like", args=[Identifier(like_name), Constant(statement.like)]
-            )
-        )
+        where.append(BinaryOperation("like", args=[Identifier(like_name), Constant(statement.like)]))
     if statement.where is not None:
         where.append(statement.where)

     if len(where) > 0:
-        return reduce(
-            lambda prev, next: BinaryOperation("and", args=[prev, next]), where
-        )
+        return reduce(lambda prev, next: BinaryOperation("and", args=[prev, next]), where)
     return None

@@ -184,7 +178,7 @@ class ExecuteCommands:
                 name,
                 handler=statement.handler,
                 params=statement.params,
-                if_not_exists=getattr(statement, "if_not_exists", False)
+                if_not_exists=getattr(statement, "if_not_exists", False),
             )
         elif statement_type is DropMLEngine:
             return self.answer_drop_ml_engine(statement)

@@ -199,7 +193,7 @@ class ExecuteCommands:
             # NOTE in sql 'describe table' is same as 'show columns'
             obj_type = statement.type

-            if obj_type is None or obj_type.upper() in (
+            if obj_type is None or obj_type.upper() in ("MODEL", "PREDICTOR"):
                 return self.answer_describe_predictor(statement.value, database_name)
             else:
                 return self.answer_describe_object(obj_type.upper(), statement.value, database_name)

@@ -234,9 +228,7 @@ class ExecuteCommands:
                 return self.answer_select(query)
             elif sql_category == "plugins":
                 if statement.where is not None or statement.like:
-                    raise ExecutorException(
-                        "'SHOW PLUGINS' query should be used without filters"
-                    )
+                    raise ExecutorException("'SHOW PLUGINS' query should be used without filters")
                 new_statement = Select(
                     targets=[Star()],
                     from_table=Identifier(parts=["information_schema", "PLUGINS"]),

@@ -262,10 +254,7 @@ class ExecuteCommands:
                 return self.answer_select(query)
             elif sql_category in ("tables", "full tables"):
                 schema = database_name or "mindsdb"
-                if (
-                    statement.from_table is not None
-                    and statement.in_table is not None
-                ):
+                if statement.from_table is not None and statement.in_table is not None:
                     raise ExecutorException(
                         "You have an error in your SQL syntax: 'from' and 'in' cannot be used together"
                     )
@@ -277,13 +266,13 @@ class ExecuteCommands:
                     schema = statement.in_table.parts[-1]
                     statement.in_table = None

-                table_types = [Constant(t) for t in [
+                table_types = [Constant(t) for t in ["MODEL", "BASE TABLE", "SYSTEM VIEW", "VIEW"]]
                 where = BinaryOperation(
                     "and",
                     args=[
                         BinaryOperation("=", args=[Identifier("table_schema"), Constant(schema)]),
-                        BinaryOperation("in", args=[Identifier("table_type"), Tuple(table_types)])
-                    ]
+                        BinaryOperation("in", args=[Identifier("table_type"), Tuple(table_types)]),
+                    ],
                 )

                 new_statement = Select(

@@ -294,15 +283,11 @@ class ExecuteCommands:
                         )
                     ],
                     from_table=Identifier(parts=["information_schema", "TABLES"]),
-                    where=_get_show_where(
-                        statement, like_name=f"Tables_in_{schema}", initial=where
-                    ),
+                    where=_get_show_where(statement, like_name=f"Tables_in_{schema}", initial=where),
                 )

                 if "FULL" in statement.modes:
-                    new_statement.targets.append(
-                        Identifier(parts=["TABLE_TYPE"], alias=Identifier("Table_type"))
-                    )
+                    new_statement.targets.append(Identifier(parts=["TABLE_TYPE"], alias=Identifier("Table_type")))

                 query = SQLQuery(new_statement, session=self.session, database=database_name)
                 return self.answer_select(query)

@@ -327,9 +312,7 @@ class ExecuteCommands:
                     var_name = var_name.replace("@@", "")
                     if is_session and var_name.startswith("session.") is False:
                         continue
-                    if var_name.startswith("session.") or var_name.startswith(
-                        "GLOBAL."
-                    ):
+                    if var_name.startswith("session.") or var_name.startswith("GLOBAL."):
                         name = var_name.replace("session.", "").replace("GLOBAL.", "")
                         data[name] = var_data[0]
                     elif var_name not in data:
@@ -338,28 +321,20 @@ class ExecuteCommands:
                 df = pd.DataFrame(data.items(), columns=["Variable_name", "Value"])
                 df2 = query_df(df, new_statement)

-                return ExecuteAnswer(
-                    data=ResultSet.from_df(df2, table_name="session_variables")
-                )
+                return ExecuteAnswer(data=ResultSet.from_df(df2, table_name="session_variables"))
             elif sql_category == "search_path":
                 return ExecuteAnswer(
                     data=ResultSet(
-                        columns=[
-
-                        ],
-                        values=[['"$user", public']]
+                        columns=[Column(name="search_path", table_name="search_path", type="str")],
+                        values=[['"$user", public']],
                     )
                 )
             elif "show status like 'ssl_version'" in sql_lower:
                 return ExecuteAnswer(
                     data=ResultSet(
                         columns=[
-                            Column(
-
-                            ),
-                            Column(
-                                name="Value", table_name="session_variables", type="str"
-                            ),
+                            Column(name="Value", table_name="session_variables", type="str"),
+                            Column(name="Value", table_name="session_variables", type="str"),
                         ],
                         values=[["Ssl_version", "TLSv1.1"]],
                     )

@@ -398,23 +373,17 @@ class ExecuteCommands:
             # FIXME if have answer on that request, then DataGrip show warning '[S0022] Column 'Non_unique' not found.'
             elif "show create table" in sql_lower:
                 # SHOW CREATE TABLE `MINDSDB`.`predictors`
-                table = sql[sql.rfind(".") + 1:].strip(" .;\n\t").replace("`", "")
+                table = sql[sql.rfind(".") + 1 :].strip(" .;\n\t").replace("`", "")
                 return self.answer_show_create_table(table)
             elif sql_category in ("character set", "charset"):
                 new_statement = Select(
                     targets=[
                         Identifier("CHARACTER_SET_NAME", alias=Identifier("Charset")),
-                        Identifier(
-
-                        ),
-                        Identifier(
-                            "DESCRIPTION", alias=Identifier("Default collation")
-                        ),
+                        Identifier("DEFAULT_COLLATE_NAME", alias=Identifier("Description")),
+                        Identifier("DESCRIPTION", alias=Identifier("Default collation")),
                         Identifier("MAXLEN", alias=Identifier("Maxlen")),
                     ],
-                    from_table=Identifier(
-                        parts=["INFORMATION_SCHEMA", "CHARACTER_SETS"]
-                    ),
+                    from_table=Identifier(parts=["INFORMATION_SCHEMA", "CHARACTER_SETS"]),
                     where=_get_show_where(statement, like_name="CHARACTER_SET_NAME"),
                 )
                 query = SQLQuery(new_statement, session=self.session, database=database_name)
@@ -467,9 +436,18 @@ class ExecuteCommands:
                 database_name=database_name,
             )

-            elif sql_category in (
-
-
+            elif sql_category in (
+                "agents",
+                "jobs",
+                "skills",
+                "chatbots",
+                "triggers",
+                "views",
+                "knowledge_bases",
+                "knowledge bases",
+                "predictors",
+                "models",
+            ):
                 if sql_category == "knowledge bases":
                     sql_category = "knowledge_bases"

@@ -480,25 +458,21 @@ class ExecuteCommands:
                 if statement.from_table is not None:
                     db_name = statement.from_table.parts[-1]

-                    where = BinaryOperation(op=
+                    where = BinaryOperation(op="=", args=[Identifier("project"), Constant(db_name)])

                 select_statement = Select(
                     targets=[Star()],
-                    from_table=Identifier(
-                        parts=["information_schema", sql_category]
-                    ),
+                    from_table=Identifier(parts=["information_schema", sql_category]),
                     where=_get_show_where(statement, like_name="name", initial=where),
                 )
                 query = SQLQuery(select_statement, session=self.session)
                 return self.answer_select(query)

             elif sql_category == "projects":
-                where = BinaryOperation(op=
+                where = BinaryOperation(op="=", args=[Identifier("type"), Constant("project")])
                 select_statement = Select(
-                    targets=[Identifier(parts=["NAME"], alias=Identifier(
-                    from_table=Identifier(
-                        parts=["information_schema", "DATABASES"]
-                    ),
+                    targets=[Identifier(parts=["NAME"], alias=Identifier("project"))],
+                    from_table=Identifier(parts=["information_schema", "DATABASES"]),
                     where=_get_show_where(statement, like_name="project", from_name="project", initial=where),
                 )

@@ -565,9 +539,7 @@ class ExecuteCommands:
                 return self.answer_update_model_version(statement.value, database_name)

             else:
-                logger.warning(
-                    f"SQL statement is not processable, return OK package: {sql}"
-                )
+                logger.warning(f"SQL statement is not processable, return OK package: {sql}")
                 return ExecuteAnswer()
         elif statement_type is Use:
             db_name = statement.value.parts[-1]
@@ -584,26 +556,16 @@ class ExecuteCommands:
             return self.answer_drop_view(statement, database_name)
         elif statement_type is Delete:
             query = SQLQuery(statement, session=self.session, database=database_name)
-            return ExecuteAnswer(
-                affected_rows=query.fetched_data.affected_rows
-            )
+            return ExecuteAnswer(affected_rows=query.fetched_data.affected_rows)
         elif statement_type is Insert:
             query = SQLQuery(statement, session=self.session, database=database_name)
             if query.fetched_data.length() > 0:
                 return self.answer_select(query)
-            return ExecuteAnswer(
-                affected_rows=query.fetched_data.affected_rows
-            )
+            return ExecuteAnswer(affected_rows=query.fetched_data.affected_rows)
         elif statement_type is Update:
             query = SQLQuery(statement, session=self.session, database=database_name)
-            return ExecuteAnswer(
-
-            )
-        elif (
-            statement_type is Alter
-            and ("disable keys" in sql_lower)
-            or ("enable keys" in sql_lower)
-        ):
+            return ExecuteAnswer(affected_rows=query.fetched_data.affected_rows)
+        elif statement_type is Alter and ("disable keys" in sql_lower) or ("enable keys" in sql_lower):
             return ExecuteAnswer()
         elif statement_type is Select:
             ret = self.exec_service_function(statement, database_name)

@@ -677,11 +639,11 @@ class ExecuteCommands:

         command = target.op.lower()
         args = [arg.value for arg in target.args if isinstance(arg, Constant)]
-        if command ==
+        if command == "query_resume":
             ret = SQLQuery(None, session=self.session, query_id=args[0])
             return self.answer_select(ret)

-        elif command ==
+        elif command == "query_cancel":
             query_context_controller.cancel_query(*args)
             return ExecuteAnswer()

@@ -822,25 +784,17 @@ class ExecuteCommands:
     def answer_evaluate_metric(self, statement, database_name):
         # heavy import, so we do it here on-demand
         from mindsdb_evaluator.accuracy.general import evaluate_accuracy
+
         try:
             sqlquery = SQLQuery(statement.data, session=self.session, database=database_name)
         except Exception as e:
-            raise Exception(
-                f'Nested query failed to execute with error: "{e}", please check and try again.'
-            )
+            raise Exception(f'Nested query failed to execute with error: "{e}", please check and try again.')
         df = sqlquery.fetched_data.to_df()
-        df.columns = [
-            str(t.alias) if hasattr(t, "alias") else str(t.parts[-1])
-            for t in statement.data.targets
-        ]
+        df.columns = [str(t.alias) if hasattr(t, "alias") else str(t.parts[-1]) for t in statement.data.targets]

         for col in ["actual", "prediction"]:
-            assert (
-
-            ), f"`{col}` column was not provided, please try again."
-            assert (
-                df[col].isna().sum() == 0
-            ), f"There are missing values in the `{col}` column, please try again."
+            assert col in df.columns, f"`{col}` column was not provided, please try again."
+            assert df[col].isna().sum() == 0, f"There are missing values in the `{col}` column, please try again."

         metric_name = statement.name.parts[-1]
         target_series = df.pop("prediction")

@@ -861,9 +815,17 @@ class ExecuteCommands:
         )

     def answer_describe_object(self, obj_type: str, obj_name: Identifier, database_name: str):
-
-
-
+        project_objects = (
+            "AGENTS",
+            "JOBS",
+            "SKILLS",
+            "CHATBOTS",
+            "TRIGGERS",
+            "VIEWS",
+            "KNOWLEDGE_BASES",
+            "PREDICTORS",
+            "MODELS",
+        )

         global_objects = ("DATABASES", "PROJECTS", "HANDLERS", "ML_ENGINES")

@@ -871,39 +833,31 @@ class ExecuteCommands:

         # is not plural?
         if obj_type not in all_objects:
-            if obj_type +
-                obj_type = obj_type +
-            elif obj_type +
-                obj_type = obj_type +
+            if obj_type + "S" in all_objects:
+                obj_type = obj_type + "S"
+            elif obj_type + "ES" in all_objects:
+                obj_type = obj_type + "ES"
             else:
-                raise WrongArgumentError(f
+                raise WrongArgumentError(f"Unknown describe type: {obj_type}")

         parts = obj_name.parts
         if len(parts) > 2:
             raise WrongArgumentError(
-                f"Invalid object name: {obj_name.to_string()}.\
-                "Only models support three-part namespaces."
+                f"Invalid object name: {obj_name.to_string()}.\nOnly models support three-part namespaces."
             )

         name = parts[-1]
-        where = BinaryOperation(op=
-            Identifier('name'),
-            Constant(name)
-        ])
+        where = BinaryOperation(op="=", args=[Identifier("name"), Constant(name)])

         if obj_type in project_objects:
             database_name = parts[0] if len(parts) > 1 else database_name
-            where = BinaryOperation(
-                where,
-
-            ])
+            where = BinaryOperation(
+                op="and", args=[where, BinaryOperation(op="=", args=[Identifier("project"), Constant(database_name)])]
+            )

         select_statement = Select(
             targets=[Star()],
-            from_table=Identifier(
-                parts=["information_schema", obj_type]
-            ),
-
+            from_table=Identifier(parts=["information_schema", obj_type]),
             where=where,
         )
         query = SQLQuery(select_statement, session=self.session)

@@ -924,7 +878,9 @@ class ExecuteCommands:
             # model.?attrs
             parts = value[:1]
             attrs = value[1:]
-            model_info = self._get_model_info(
+            model_info = self._get_model_info(
+                Identifier(parts=parts), except_absent=False, database_name=database_name
+            )

             if model_info is None:
                 raise ExecutorException(f"Model not found: {obj_name}")

@@ -939,20 +895,14 @@ class ExecuteCommands:
             model_info["project_name"],
             model_info["model_record"].name,
             attribute=attrs,
-            version=model_info[
+            version=model_info["model_record"].version,
         )

-        return ExecuteAnswer(
-            data=ResultSet.from_df(df, table_name="")
-        )
+        return ExecuteAnswer(data=ResultSet.from_df(df, table_name=""))

     def answer_create_kb_index(self, statement, database_name):
         table_name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name
         self.session.kb_controller.create_index(table_name=table_name, project_name=project_name)
         return ExecuteAnswer()

@@ -994,8 +944,7 @@ class ExecuteCommands:
         shortest_training = None
         for model in models:
             if (
-                model.status
-                in (PREDICTOR_STATUS.GENERATING, PREDICTOR_STATUS.TRAINING)
+                model.status in (PREDICTOR_STATUS.GENERATING, PREDICTOR_STATUS.TRAINING)
                 and model.training_start_at is not None
                 and model.training_stop_at is None
             ):

@@ -1003,10 +952,7 @@ class ExecuteCommands:
                 if shortest_training is None or training_time < shortest_training:
                     shortest_training = training_time

-        if (
-            shortest_training is not None
-            and shortest_training < datetime.timedelta(hours=1)
-        ):
+        if shortest_training is not None and shortest_training < datetime.timedelta(hours=1):
             raise ExecutorException(
                 f"Can't start {phase_name} process while any other predictor is in status 'training' or 'generating'"
             )

@@ -1017,13 +963,9 @@ class ExecuteCommands:
        if statement.query_str is None:
            if model_record.data_integration_ref is not None:
                if model_record.data_integration_ref["type"] == "integration":
-                    integration = self.session.integration_controller.get_by_id(
-                        model_record.data_integration_ref["id"]
-                    )
+                    integration = self.session.integration_controller.get_by_id(model_record.data_integration_ref["id"])
                    if integration is None:
-                        raise EntityNotExistsError(
-                            "The database from which the model was trained no longer exists"
-                        )
+                        raise EntityNotExistsError("The database from which the model was trained no longer exists")
        elif statement.integration_name is None:
            # set to current project
            statement.integration_name = Identifier(database_name)

@@ -1035,25 +977,19 @@ class ExecuteCommands:

        if "engine" in statement.using:
            ml_integration_name = statement.using.pop("engine")
-            ml_handler = self.session.integration_controller.get_ml_handler(
-                ml_integration_name
-            )
+            ml_handler = self.session.integration_controller.get_ml_handler(ml_integration_name)

        # use current ml handler
        if ml_handler is None:
            integration_record = get_predictor_integration(model_record)
            if integration_record is None:
                raise EntityNotExistsError("ML engine model was trained with does not esxists")
-            ml_handler = self.session.integration_controller.get_ml_handler(
-                integration_record.name
-            )
+            ml_handler = self.session.integration_controller.get_ml_handler(integration_record.name)

        self._sync_predictor_check(phase_name="retrain")
        df = self.session.model_controller.retrain_model(statement, ml_handler)

-        return ExecuteAnswer(
-            data=ResultSet.from_df(df)
-        )
+        return ExecuteAnswer(data=ResultSet.from_df(df))

    @profiler.profile()
    @mark_process("learn")

@@ -1071,19 +1007,13 @@ class ExecuteCommands:
            # use current ml handler
            integration_record = get_predictor_integration(model_record)
            if integration_record is None:
-                raise Exception(
-
-                )
+                raise Exception("The ML engine that the model was trained with does not exist.")
-            ml_handler = self.session.integration_controller.get_ml_handler(
-                integration_record.name
-            )
+            ml_handler = self.session.integration_controller.get_ml_handler(integration_record.name)

        self._sync_predictor_check(phase_name="finetune")
        df = self.session.model_controller.finetune_model(statement, ml_handler)

-        return ExecuteAnswer(
-            data=ResultSet.from_df(df)
-        )
+        return ExecuteAnswer(data=ResultSet.from_df(df))

    def _create_integration(self, name: str, engine: str, connection_args: dict):
        # we have connection checkers not for any db. So do nothing if fail
@@ -1100,7 +1030,9 @@ class ExecuteCommands:
            raise ExecutorException(f"There is no engine '{engine}'")

        if handler_meta.get("import", {}).get("success") is not True:
-            raise ExecutorException(
+            raise ExecutorException(
+                f"The '{engine}' handler isn't installed.\n" + get_handler_install_message(engine)
+            )

        accept_connection_args = handler_meta.get("connection_args")
        if accept_connection_args is not None and connection_args is not None:

@@ -1118,25 +1050,17 @@ class ExecuteCommands:
                if isinstance(arg_value, (str, dict)) is False:
                    raise ExecutorException(f"Unknown type of arg: '{arg_value}'")
                if isinstance(arg_value, str) or "path" in arg_value:
-                    path = (
-                        arg_value
-                        if isinstance(arg_value, str)
-                        else arg_value["path"]
-                    )
+                    path = arg_value if isinstance(arg_value, str) else arg_value["path"]
                    if Path(path).is_file() is False:
                        raise ExecutorException(f"File not found at: '{path}'")
                elif "url" in arg_value:
                    path = download_file(arg_value["url"])
                else:
-                    raise ExecutorException(
-                        f"Argument '{arg_name}' must be path or url to the file"
-                    )
+                    raise ExecutorException(f"Argument '{arg_name}' must be path or url to the file")
                connection_args[arg_name] = path

        handler = self.session.integration_controller.create_tmp_handler(
-            name=name,
-            engine=engine,
-            connection_args=connection_args
+            name=name, engine=engine, connection_args=connection_args
        )
        status = handler.check_connection()
        if status.copy_storage:

@@ -1149,13 +1073,13 @@ class ExecuteCommands:

        integration = self.session.integration_controller.get(name)
        if integration is not None:
-            raise EntityExistsError(
+            raise EntityExistsError("Database already exists", name)
        try:
            integration = ProjectController().get(name=name)
        except EntityNotExistsError:
            pass
        if integration is not None:
-            raise EntityExistsError(
+            raise EntityExistsError("Project exists with this name", name)

        self.session.integration_controller.add(name, engine, connection_args)
        if storage:

@@ -1163,11 +1087,10 @@ class ExecuteCommands:
            handler.handler_storage.import_files(storage)

    def answer_create_ml_engine(self, name: str, handler: str, params: dict = None, if_not_exists=False):
-
        integrations = self.session.integration_controller.get_all()
        if name in integrations:
            if not if_not_exists:
-                raise EntityExistsError(
+                raise EntityExistsError("Integration already exists", name)
            else:
                return ExecuteAnswer()

@@ -1185,21 +1108,19 @@ class ExecuteCommands:
                params_out[key] = value

        try:
-            self.session.integration_controller.add(
-                name=name, engine=handler, connection_args=params_out
-            )
+            self.session.integration_controller.add(name=name, engine=handler, connection_args=params_out)
        except Exception as e:
            msg = str(e)
            if type(e) in (ImportError, ModuleNotFoundError):
                msg = dedent(
                    f"""\
-                    The '{handler_module_meta[
-                    {handler_module_meta[
+                    The '{handler_module_meta["name"]}' handler cannot be used. Reason is:
+                    {handler_module_meta["import"]["error_message"]}
                """
                )
            is_cloud = self.session.config.get("cloud", False)
-            if is_cloud is False and "No module named" in handler_module_meta[
-                logger.info(get_handler_install_message(handler_module_meta[
+            if is_cloud is False and "No module named" in handler_module_meta["import"]["error_message"]:
+                logger.info(get_handler_install_message(handler_module_meta["name"]))
            ast_drop = DropMLEngine(name=Identifier(name))
            self.answer_drop_ml_engine(ast_drop)
            logger.info(msg)

@@ -1212,7 +1133,7 @@ class ExecuteCommands:
        integrations = self.session.integration_controller.get_all()
        if name not in integrations:
            if not statement.if_exists:
-                raise EntityNotExistsError(
+                raise EntityNotExistsError("Integration does not exists", name)
            else:
                return ExecuteAnswer()
        self.session.integration_controller.delete(name)
@@ -1281,25 +1202,15 @@ class ExecuteCommands:
        # TODO do we need feature: delete object from project via drop table?

        project = self.session.database_controller.get_project(db_name)
-        project_tables = {
-            key: val
-            for key, val in project.get_tables().items()
-            if val.get("deletable") is True
-        }
+        project_tables = {key: val for key, val in project.get_tables().items() if val.get("deletable") is True}
        table_name = table.to_string()

        if table_name in project_tables:
-            self.session.model_controller.delete_model(
-                table_name, project_name=db_name
-            )
+            self.session.model_controller.delete_model(table_name, project_name=db_name)
        elif statement.if_exists is False:
-            raise ExecutorException(
-                f"Cannot delete a table from database '{db_name}': table does not exists"
-            )
+            raise ExecutorException(f"Cannot delete a table from database '{db_name}': table does not exists")
        else:
-            raise ExecutorException(
-                f"Cannot delete a table from database '{db_name}'"
-            )
+            raise ExecutorException(f"Cannot delete a table from database '{db_name}'")

        return ExecuteAnswer()

@@ -1320,9 +1231,7 @@ class ExecuteCommands:
        if isinstance(statement.from_table, Identifier):
            query = Select(
                targets=[Star()],
-                from_table=NativeQuery(
-                    integration=statement.from_table, query=statement.query_str
-                ),
+                from_table=NativeQuery(integration=statement.from_table, query=statement.query_str),
            )
            query_str = str(query)
        else:

@@ -1332,15 +1241,11 @@ class ExecuteCommands:
        # check create view sql
        query.limit = Constant(1)

-        query_context_controller.set_context(
-            query_context_controller.IGNORE_CONTEXT
-        )
+        query_context_controller.set_context(query_context_controller.IGNORE_CONTEXT)
        try:
            SQLQuery(query, session=self.session, database=database_name)
        finally:
-            query_context_controller.release_context(
-                query_context_controller.IGNORE_CONTEXT
-            )
+            query_context_controller.release_context(query_context_controller.IGNORE_CONTEXT)

        project = self.session.database_controller.get_project(project_name)
        try:

@@ -1370,24 +1275,23 @@ class ExecuteCommands:
        return ExecuteAnswer()

    def answer_create_kb(self, statement: CreateKnowledgeBase, database_name: str):
-
-
-
-
-
+        if statement.model:
+            raise ExecutorException(
+                "Creating a knowledge base using pre-existing models is no longer supported.\n"
+                "Please pass the model parameters as a JSON object in the embedding_model field."
+            )
+
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

        if statement.storage is not None:
            if len(statement.storage.parts) != 2:
                raise ExecutorException(
-                    f"Invalid vectordatabase table name: {statement.storage}"
-                    "Need the form 'database_name.table_name'"
+                    f"Invalid vectordatabase table name: {statement.storage}Need the form 'database_name.table_name'"
                )

        if statement.from_query is not None:
            # TODO: implement this
-            raise ExecutorException(
-                "Create a knowledge base from a select is not supported yet"
-            )
+            raise ExecutorException("Create a knowledge base from a select is not supported yet")

        kb_name = statement.name.parts[-1]

@@ -1395,7 +1299,7 @@ class ExecuteCommands:
        _ = self.session.kb_controller.add(
            name=kb_name,
            project_name=project_name,
-            embedding_model=statement.model,
+            # embedding_model=statement.model,
            storage=statement.storage,
            params=statement.params,
            if_not_exists=statement.if_not_exists,

@@ -1405,11 +1309,7 @@ class ExecuteCommands:

    def answer_drop_kb(self, statement: DropKnowledgeBase, database_name: str):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

        # delete the knowledge base
        self.session.kb_controller.delete(

@@ -1422,19 +1322,10 @@ class ExecuteCommands:

    def answer_create_skill(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

        try:
-            _ = self.session.skills_controller.add_skill(
-                name,
-                project_name,
-                statement.type,
-                statement.params
-            )
+            _ = self.session.skills_controller.add_skill(name, project_name, statement.type, statement.params)
        except ValueError as e:
            # Project does not exist or skill already exists.
            raise ExecutorException(str(e))

@@ -1443,11 +1334,7 @@ class ExecuteCommands:

    def answer_drop_skill(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

        try:
            self.session.skills_controller.delete_skill(name, project_name)

@@ -1459,19 +1346,12 @@ class ExecuteCommands:

    def answer_update_skill(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

-        type = statement.params.pop(
+        type = statement.params.pop("type", None)
        try:
            _ = self.session.skills_controller.update_skill(
-                name,
-                project_name=project_name,
-                type=type,
-                params=statement.params
+                name, project_name=project_name, type=type, params=statement.params
            )
        except ValueError as e:
            # Project does not exist or skill does not exist.

@@ -1481,14 +1361,10 @@ class ExecuteCommands:

    def answer_create_agent(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

-        skills = statement.params.pop(
-        provider = statement.params.pop(
+        skills = statement.params.pop("skills", [])
+        provider = statement.params.pop("provider", None)
        try:
            _ = self.session.agents_controller.add_agent(
                name=name,

@@ -1496,7 +1372,7 @@ class ExecuteCommands:
                model_name=statement.model,
                skills=skills,
                provider=provider,
-                params=statement.params
+                params=statement.params,
            )
        except ValueError as e:
            # Project does not exist or agent already exists.

@@ -1506,11 +1382,7 @@ class ExecuteCommands:

    def answer_drop_agent(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

        try:
            self.session.agents_controller.delete_agent(name, project_name)

@@ -1522,15 +1394,11 @@ class ExecuteCommands:

    def answer_update_agent(self, statement, database_name):
        name = statement.name.parts[-1]
-        project_name = (
-            statement.name.parts[0]
-            if len(statement.name.parts) > 1
-            else database_name
-        )
+        project_name = statement.name.parts[0] if len(statement.name.parts) > 1 else database_name

-        model = statement.params.pop(
-        skills_to_add = statement.params.pop(
-        skills_to_remove = statement.params.pop(
+        model = statement.params.pop("model", None)
+        skills_to_add = statement.params.pop("skills_to_add", [])
+        skills_to_remove = statement.params.pop("skills_to_remove", [])
        try:
            _ = self.session.agents_controller.update_agent(
                name,
@@ -1538,7 +1406,7 @@ class ExecuteCommands:
                model_name=model,
                skills_to_add=skills_to_add,
                skills_to_remove=skills_to_remove,
-                params=statement.params
+                params=statement.params,
            )
        except (EntityExistsError, EntityNotExistsError, ValueError) as e:
            # Project does not exist or agent does not exist.

@@ -1568,24 +1436,17 @@ class ExecuteCommands:
            statement.integration_name = Identifier(database_name)

        try:
-            ml_handler = self.session.integration_controller.get_ml_handler(
-                ml_integration_name
-            )
+            ml_handler = self.session.integration_controller.get_ml_handler(ml_integration_name)
        except EntityNotExistsError:
            # not exist, try to create it with same name as handler
            self.answer_create_ml_engine(ml_integration_name, handler=ml_integration_name)

-            ml_handler = self.session.integration_controller.get_ml_handler(
-                ml_integration_name
-            )
+            ml_handler = self.session.integration_controller.get_ml_handler(ml_integration_name)

        if getattr(statement, "is_replace", False) is True:
            # try to delete
            try:
-                self.session.model_controller.delete_model(
-                    model_name,
-                    project_name=integration_name
-                )
+                self.session.model_controller.delete_model(model_name, project_name=integration_name)
            except EntityNotExistsError:
                pass

@@ -1618,9 +1479,7 @@ class ExecuteCommands:
            "and",
            args=[
                BinaryOperation("=", args=[Identifier("TABLE_SCHEMA"), Constant(db)]),
-                BinaryOperation(
-                    "=", args=[Identifier("TABLE_NAME"), Constant(table_name)]
-                ),
+                BinaryOperation("=", args=[Identifier("TABLE_NAME"), Constant(table_name)]),
            ],
        )
        if where is not None:

@@ -1657,9 +1516,7 @@ class ExecuteCommands:
    def answer_show_create_table(self, table):
        columns = [
            Column(table_name="", name="Table", type=TYPES.MYSQL_TYPE_VAR_STRING),
-            Column(
-                table_name="", name="Create Table", type=TYPES.MYSQL_TYPE_VAR_STRING
-            ),
+            Column(table_name="", name="Create Table", type=TYPES.MYSQL_TYPE_VAR_STRING),
        ]
        return ExecuteAnswer(
            data=ResultSet(

@@ -1987,14 +1844,14 @@ class ExecuteCommands:

    def answer_update_model_version(self, model_version, database_name):
        if not isinstance(model_version, Identifier):
-            raise ExecutorException(f
+            raise ExecutorException(f"Please define version: {model_version}")

        model_parts = model_version.parts
        version = model_parts[-1]
        if version.isdigit():
            version = int(version)
        else:
-            raise ExecutorException(f
+            raise ExecutorException(f"Unknown version: {version}")

        if len(model_parts) == 3:
            project_name, model_name = model_parts[:2]

@@ -2002,13 +1859,12 @@ class ExecuteCommands:
            model_name = model_parts[0]
            project_name = database_name
        else:
-            raise ExecutorException(f
+            raise ExecutorException(f"Unknown model: {model_version}")

        self.session.model_controller.set_model_active_version(project_name, model_name, version)
        return ExecuteAnswer()

    def answer_drop_model(self, statement, database_name):
-
        model_parts = statement.name.parts
        version = None

@@ -2023,7 +1879,7 @@ class ExecuteCommands:
            model_name = model_parts[0]
            project_name = database_name
        else:
-            raise ExecutorException(f
+            raise ExecutorException(f"Unknown model: {statement.name}")

        if version is not None:
            # delete version