MindsDB 25.3.4.2__py3-none-any.whl → 25.4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of MindsDB might be problematic.

Files changed (53)
  1. mindsdb/__about__.py +1 -1
  2. mindsdb/__main__.py +21 -4
  3. mindsdb/api/executor/command_executor.py +62 -61
  4. mindsdb/api/executor/data_types/answer.py +9 -12
  5. mindsdb/api/executor/datahub/classes/response.py +11 -0
  6. mindsdb/api/executor/datahub/datanodes/datanode.py +4 -4
  7. mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py +7 -9
  8. mindsdb/api/executor/datahub/datanodes/integration_datanode.py +22 -16
  9. mindsdb/api/executor/datahub/datanodes/project_datanode.py +20 -20
  10. mindsdb/api/executor/planner/plan_join.py +1 -1
  11. mindsdb/api/executor/planner/steps.py +2 -1
  12. mindsdb/api/executor/sql_query/result_set.py +10 -7
  13. mindsdb/api/executor/sql_query/sql_query.py +36 -82
  14. mindsdb/api/executor/sql_query/steps/delete_step.py +2 -3
  15. mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +5 -3
  16. mindsdb/api/executor/sql_query/steps/insert_step.py +2 -2
  17. mindsdb/api/executor/sql_query/steps/prepare_steps.py +2 -2
  18. mindsdb/api/executor/sql_query/steps/subselect_step.py +20 -8
  19. mindsdb/api/executor/sql_query/steps/update_step.py +4 -6
  20. mindsdb/api/http/namespaces/sql.py +4 -1
  21. mindsdb/api/mcp/__init__.py +0 -0
  22. mindsdb/api/mcp/start.py +152 -0
  23. mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/ok_packet.py +1 -1
  24. mindsdb/api/mysql/mysql_proxy/executor/mysql_executor.py +4 -27
  25. mindsdb/api/mysql/mysql_proxy/libs/constants/mysql.py +1 -0
  26. mindsdb/api/mysql/mysql_proxy/mysql_proxy.py +38 -37
  27. mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py +23 -13
  28. mindsdb/integrations/handlers/mssql_handler/mssql_handler.py +1 -1
  29. mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +3 -2
  30. mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +4 -4
  31. mindsdb/integrations/handlers/pgvector_handler/pgvector_handler.py +19 -5
  32. mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +9 -4
  33. mindsdb/integrations/handlers/redshift_handler/redshift_handler.py +1 -1
  34. mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +18 -11
  35. mindsdb/integrations/libs/ml_handler_process/learn_process.py +1 -2
  36. mindsdb/integrations/libs/response.py +9 -4
  37. mindsdb/integrations/libs/vectordatabase_handler.py +37 -25
  38. mindsdb/integrations/utilities/rag/rerankers/reranker_compressor.py +35 -15
  39. mindsdb/interfaces/database/log.py +8 -9
  40. mindsdb/interfaces/database/projects.py +16 -5
  41. mindsdb/interfaces/functions/controller.py +59 -17
  42. mindsdb/interfaces/functions/to_markdown.py +194 -0
  43. mindsdb/interfaces/jobs/jobs_controller.py +3 -3
  44. mindsdb/interfaces/knowledge_base/controller.py +143 -26
  45. mindsdb/interfaces/knowledge_base/preprocessing/document_preprocessor.py +3 -14
  46. mindsdb/interfaces/query_context/context_controller.py +3 -1
  47. mindsdb/utilities/config.py +8 -0
  48. mindsdb/utilities/starters.py +7 -0
  49. {mindsdb-25.3.4.2.dist-info → mindsdb-25.4.2.0.dist-info}/METADATA +233 -231
  50. {mindsdb-25.3.4.2.dist-info → mindsdb-25.4.2.0.dist-info}/RECORD +53 -49
  51. {mindsdb-25.3.4.2.dist-info → mindsdb-25.4.2.0.dist-info}/WHEEL +0 -0
  52. {mindsdb-25.3.4.2.dist-info → mindsdb-25.4.2.0.dist-info}/licenses/LICENSE +0 -0
  53. {mindsdb-25.3.4.2.dist-info → mindsdb-25.4.2.0.dist-info}/top_level.txt +0 -0
mindsdb/__about__.py CHANGED
@@ -1,6 +1,6 @@
  __title__ = 'MindsDB'
  __package_name__ = 'mindsdb'
- __version__ = '25.3.4.2'
+ __version__ = '25.4.2.0'
  __description__ = "MindsDB's AI SQL Server enables developers to build AI tools that need access to real-time data to perform their tasks"
  __email__ = "jorge@mindsdb.com"
  __author__ = 'MindsDB Inc'
mindsdb/__main__.py CHANGED
@@ -24,7 +24,8 @@ from mindsdb.__about__ import __version__ as mindsdb_version
  from mindsdb.utilities.config import config
  from mindsdb.utilities.exception import EntityNotExistsError
  from mindsdb.utilities.starters import (
- start_http, start_mysql, start_mongo, start_postgres, start_ml_task_queue, start_scheduler, start_tasks
+ start_http, start_mysql, start_mongo, start_postgres, start_ml_task_queue, start_scheduler, start_tasks,
+ start_mcp
  )
  from mindsdb.utilities.ps import is_pid_listen_port, get_child_pids
  from mindsdb.utilities.functions import get_versions_where_predictors_become_obsolete
@@ -57,6 +58,7 @@ class TrunkProcessEnum(Enum):
  JOBS = 'jobs'
  TASKS = 'tasks'
  ML_TASK_QUEUE = 'ml_task_queue'
+ MCP = 'mcp'

  @classmethod
  def _missing_(cls, value):
@@ -221,9 +223,9 @@ if __name__ == '__main__':
  ctx.set_default()

  # ---- CHECK SYSTEM ----
- if not (sys.version_info[0] >= 3 and sys.version_info[1] >= 9):
+ if not (sys.version_info[0] >= 3 and sys.version_info[1] >= 10):
  print("""
- MindsDB requires Python >= 3.9 to run
+ MindsDB requires Python >= 3.10 to run

  Once you have supported Python version installed you can start mindsdb as follows:

@@ -385,6 +387,7 @@ if __name__ == '__main__':

  http_api_config = config['api']['http']
  mysql_api_config = config['api']['mysql']
+ mcp_api_config = config['api']['mcp']
  trunc_processes_struct = {
  TrunkProcessEnum.HTTP: TrunkProcessData(
  name=TrunkProcessEnum.HTTP.value,
@@ -434,11 +437,25 @@ if __name__ == '__main__':
  name=TrunkProcessEnum.ML_TASK_QUEUE.value,
  entrypoint=start_ml_task_queue,
  args=(config.cmd_args.verbose,)
+ ),
+ TrunkProcessEnum.MCP: TrunkProcessData(
+ name=TrunkProcessEnum.MCP.value,
+ entrypoint=start_mcp,
+ port=mcp_api_config.get('port', 47337),
+ args=(config.cmd_args.verbose,),
+ restart_on_failure=mcp_api_config.get('restart_on_failure', False),
+ max_restart_count=mcp_api_config.get('max_restart_count', TrunkProcessData.max_restart_count),
+ max_restart_interval_seconds=mcp_api_config.get(
+ 'max_restart_interval_seconds', TrunkProcessData.max_restart_interval_seconds
+ )
  )
  }

  for api_enum in api_arr:
- trunc_processes_struct[api_enum].need_to_run = True
+ if api_enum in trunc_processes_struct:
+ trunc_processes_struct[api_enum].need_to_run = True
+ else:
+ logger.error(f"ERROR: {api_enum} API is not a valid api in config")

  if config['jobs']['disable'] is False:
  trunc_processes_struct[TrunkProcessEnum.JOBS].need_to_run = True
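
Note on the new MCP entry above: the process reads its settings from config['api']['mcp'] and falls back to port 47337 when no port is set. A minimal sketch of the keys involved, assuming a config section of the shape accessed in the hunk (only the key names and the 47337 default come from the code above; the values are illustrative):

    # hypothetical config excerpt; key names mirror the .get() calls in __main__.py above
    mcp_api_config = {
        "port": 47337,                        # default applied when the key is absent
        "restart_on_failure": False,          # optional, defaults to False
        "max_restart_count": 3,               # optional, illustrative value
        "max_restart_interval_seconds": 60,   # optional, illustrative value
    }

    # the trunk process entry is then built roughly as in the hunk above
    port = mcp_api_config.get("port", 47337)
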
mindsdb/api/executor/command_executor.py CHANGED
@@ -164,18 +164,17 @@ class ExecuteCommands:
  self.datahub = session.datahub

  @profiler.profile()
- def execute_command(self, statement, database_name: str = None) -> ExecuteAnswer:
- sql = None
- if isinstance(statement, ASTNode):
- sql = statement.to_string()
- sql_lower = sql.lower()
+ def execute_command(self, statement: ASTNode, database_name: str = None) -> ExecuteAnswer:
+ sql: str = statement.to_string()
+ sql_lower: str = sql.lower()

  if database_name is None:
  database_name = self.session.database

- if type(statement) is CreateDatabase:
+ statement_type = type(statement)
+ if statement_type is CreateDatabase:
  return self.answer_create_database(statement)
- elif type(statement) is CreateMLEngine:
+ elif statement_type is CreateMLEngine:
  name = statement.name.parts[-1]

  return self.answer_create_ml_engine(
@@ -184,16 +183,16 @@ class ExecuteCommands:
  params=statement.params,
  if_not_exists=getattr(statement, "if_not_exists", False)
  )
- elif type(statement) is DropMLEngine:
+ elif statement_type is DropMLEngine:
  return self.answer_drop_ml_engine(statement)
- elif type(statement) is DropPredictor:
+ elif statement_type is DropPredictor:
  return self.answer_drop_model(statement, database_name)

- elif type(statement) is DropTables:
+ elif statement_type is DropTables:
  return self.answer_drop_tables(statement, database_name)
- elif type(statement) is DropDatasource or type(statement) is DropDatabase:
+ elif statement_type is DropDatasource or statement_type is DropDatabase:
  return self.answer_drop_database(statement)
- elif type(statement) is Describe:
+ elif statement_type is Describe:
  # NOTE in sql 'describe table' is same as 'show columns'
  obj_type = statement.type

@@ -202,11 +201,11 @@ class ExecuteCommands:
  else:
  return self.answer_describe_object(obj_type.upper(), statement.value, database_name)

- elif type(statement) is RetrainPredictor:
+ elif statement_type is RetrainPredictor:
  return self.answer_retrain_predictor(statement, database_name)
- elif type(statement) is FinetunePredictor:
+ elif statement_type is FinetunePredictor:
  return self.answer_finetune_predictor(statement, database_name)
- elif type(statement) is Show:
+ elif statement_type is Show:
  sql_category = statement.category.lower()
  if hasattr(statement, "modes"):
  if isinstance(statement.modes, list) is False:
@@ -504,13 +503,13 @@ class ExecuteCommands:
  return self.answer_select(query)
  else:
  raise NotSupportedYet(f"Statement not implemented: {sql}")
- elif type(statement) in (
+ elif statement_type in (
  StartTransaction,
  CommitTransaction,
  RollbackTransaction,
  ):
  return ExecuteAnswer()
- elif type(statement) is Set:
+ elif statement_type is Set:
  category = (statement.category or "").lower()
  if category == "" and isinstance(statement.name, Identifier):
  param = statement.name.parts[0].lower()
@@ -565,79 +564,84 @@ class ExecuteCommands:
  f"SQL statement is not processable, return OK package: {sql}"
  )
  return ExecuteAnswer()
- elif type(statement) is Use:
+ elif statement_type is Use:
  db_name = statement.value.parts[-1]
  self.change_default_db(db_name)
  return ExecuteAnswer()
- elif type(statement) in (
+ elif statement_type in (
  CreatePredictor,
  CreateAnomalyDetectionModel, # we may want to specialize these in the future
  ):
  return self.answer_create_predictor(statement, database_name)
- elif type(statement) is CreateView:
+ elif statement_type is CreateView:
  return self.answer_create_view(statement, database_name)
- elif type(statement) is DropView:
+ elif statement_type is DropView:
  return self.answer_drop_view(statement, database_name)
- elif type(statement) is Delete:
- SQLQuery(statement, session=self.session, execute=True, database=database_name)
- return ExecuteAnswer()
-
- elif type(statement) is Insert:
- SQLQuery(statement, session=self.session, execute=True, database=database_name)
- return ExecuteAnswer()
- elif type(statement) is Update:
- SQLQuery(statement, session=self.session, execute=True, database=database_name)
- return ExecuteAnswer()
+ elif statement_type is Delete:
+ query = SQLQuery(statement, session=self.session, database=database_name)
+ return ExecuteAnswer(
+ affected_rows=query.fetched_data.affected_rows
+ )
+ elif statement_type is Insert:
+ query = SQLQuery(statement, session=self.session, database=database_name)
+ return ExecuteAnswer(
+ affected_rows=query.fetched_data.affected_rows
+ )
+ elif statement_type is Update:
+ query = SQLQuery(statement, session=self.session, database=database_name)
+ return ExecuteAnswer(
+ affected_rows=query.fetched_data.affected_rows
+ )
  elif (
- type(statement) is Alter
+ statement_type is Alter
  and ("disable keys" in sql_lower)
  or ("enable keys" in sql_lower)
  ):
  return ExecuteAnswer()
- elif type(statement) is Select:
+ elif statement_type is Select:
  query = SQLQuery(statement, session=self.session, database=database_name)
  return self.answer_select(query)
- elif type(statement) is Union:
+ elif statement_type is Union:
  query = SQLQuery(statement, session=self.session, database=database_name)
  return self.answer_select(query)
- elif type(statement) is Explain:
+ elif statement_type is Explain:
  return self.answer_show_columns(statement.target, database_name=database_name)
- elif type(statement) is CreateTable:
+ elif statement_type is CreateTable:
  return self.answer_create_table(statement, database_name)
  # -- jobs --
- elif type(statement) is CreateJob:
+ elif statement_type is CreateJob:
  return self.answer_create_job(statement, database_name)
- elif type(statement) is DropJob:
+ elif statement_type is DropJob:
  return self.answer_drop_job(statement, database_name)
  # -- triggers --
- elif type(statement) is CreateTrigger:
+ elif statement_type is CreateTrigger:
  return self.answer_create_trigger(statement, database_name)
- elif type(statement) is DropTrigger:
+ elif statement_type is DropTrigger:
  return self.answer_drop_trigger(statement, database_name)
  # -- chatbots
- elif type(statement) is CreateChatBot:
+ elif statement_type is CreateChatBot:
  return self.answer_create_chatbot(statement, database_name)
- elif type(statement) is UpdateChatBot:
+ elif statement_type is UpdateChatBot:
  return self.answer_update_chatbot(statement, database_name)
- elif type(statement) is DropChatBot:
+ elif statement_type is DropChatBot:
  return self.answer_drop_chatbot(statement, database_name)
- elif type(statement) is CreateKnowledgeBase:
+ elif statement_type is CreateKnowledgeBase:
  return self.answer_create_kb(statement, database_name)
- elif type(statement) is DropKnowledgeBase:
+ elif statement_type is DropKnowledgeBase:
  return self.answer_drop_kb(statement, database_name)
- elif type(statement) is CreateSkill:
+ elif statement_type is CreateSkill:
  return self.answer_create_skill(statement, database_name)
- elif type(statement) is DropSkill:
+ elif statement_type is DropSkill:
  return self.answer_drop_skill(statement, database_name)
- elif type(statement) is UpdateSkill:
+ elif statement_type is UpdateSkill:
  return self.answer_update_skill(statement, database_name)
- elif type(statement) is CreateAgent:
+ elif statement_type is CreateAgent:
  return self.answer_create_agent(statement, database_name)
- elif type(statement) is DropAgent:
+ elif statement_type is DropAgent:
  return self.answer_drop_agent(statement, database_name)
- elif type(statement) is UpdateAgent:
+ elif statement_type is UpdateAgent:
  return self.answer_update_agent(statement, database_name)
- elif type(statement) is Evaluate:
+ elif statement_type is Evaluate:
  statement.data = parse_sql(statement.query_str)
  return self.answer_evaluate_metric(statement, database_name)
  else:
@@ -785,8 +789,7 @@ class ExecuteCommands:
  raise Exception(
  f'Nested query failed to execute with error: "{e}", please check and try again.'
  )
- result = sqlquery.fetch('dataframe')
- df = result["result"]
+ df = sqlquery.fetched_data.to_df()
  df.columns = [
  str(t.alias) if hasattr(t, "alias") else str(t.parts[-1])
  for t in statement.data.targets
@@ -1253,7 +1256,6 @@ class ExecuteCommands:
  project_name = parts[0]

  query_str = statement.query_str
- query = parse_sql(query_str)

  if isinstance(statement.from_table, Identifier):
  query = Select(
@@ -1263,6 +1265,8 @@ class ExecuteCommands:
  ),
  )
  query_str = str(query)
+ else:
+ query = parse_sql(query_str)

  if isinstance(query, Select):
  # check create view sql
@@ -1272,9 +1276,7 @@ class ExecuteCommands:
  query_context_controller.IGNORE_CONTEXT
  )
  try:
- sqlquery = SQLQuery(query, session=self.session, database=database_name)
- if sqlquery.fetch()["success"] is not True:
- raise ExecutorException("Wrong view query")
+ SQLQuery(query, session=self.session, database=database_name)
  finally:
  query_context_controller.release_context(
  query_context_controller.IGNORE_CONTEXT
@@ -1920,9 +1922,8 @@ class ExecuteCommands:
  return ExecuteAnswer()

  def answer_select(self, query):
- data = query.fetch()
-
- return ExecuteAnswer(data=data["result"])
+ data = query.fetched_data
+ return ExecuteAnswer(data=data)

  def answer_update_model_version(self, model_version, database_name):
  if not isinstance(model_version, Identifier):
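
The practical effect of the Delete/Insert/Update and answer_select changes above is that ExecuteCommands no longer calls SQLQuery.fetch(); results and row counts are read from SQLQuery.fetched_data. A condensed sketch of the new calling pattern, assuming session, statement and database_name are already in hand (is_dml is a hypothetical flag standing in for the Delete/Insert/Update branches):

    query = SQLQuery(statement, session=session, database=database_name)

    if is_dml:
        # DML answers now carry the number of affected rows
        answer = ExecuteAnswer(affected_rows=query.fetched_data.affected_rows)
    else:
        # SELECT / UNION answers wrap the ResultSet directly
        answer = ExecuteAnswer(data=query.fetched_data)
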
mindsdb/api/executor/data_types/answer.py CHANGED
@@ -1,16 +1,13 @@
- from typing import List
+ from dataclasses import dataclass
+ from typing import List, Optional
+
  from mindsdb.api.executor.sql_query.result_set import ResultSet


+ @dataclass(kw_only=True, slots=True)
  class ExecuteAnswer:
- def __init__(
- self,
- data: ResultSet = None,
- state_track: List[List] = None,
- error_code: int = None,
- error_message: str = None,
- ):
- self.data = data
- self.state_track = state_track
- self.error_code = error_code
- self.error_message = error_message
+ data: Optional[ResultSet] = None
+ state_track: Optional[List[List]] = None
+ error_code: Optional[int] = None
+ error_message: Optional[str] = None
+ affected_rows: Optional[int] = None
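
Because ExecuteAnswer is now declared with @dataclass(kw_only=True, slots=True), construction is keyword-only and undeclared attributes can no longer be attached; both parameters also require Python 3.10, which matches the interpreter check bumped in __main__.py above. A short illustration of those standard dataclass semantics (not code from the package):

    ok = ExecuteAnswer(affected_rows=3)   # fine: keyword arguments only
    ExecuteAnswer(None)                   # TypeError: positional arguments are rejected (kw_only=True)
    ok.extra = "x"                        # AttributeError: slots=True blocks undeclared attributes
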
mindsdb/api/executor/datahub/classes/response.py ADDED
@@ -0,0 +1,11 @@
+ from dataclasses import dataclass, field
+ from typing import Optional, List, Dict
+
+ import pandas as pd
+
+
+ @dataclass
+ class DataHubResponse:
+ data_frame: pd.DataFrame = field(default_factory=pd.DataFrame)
+ columns: List[Dict] = field(default_factory=list)
+ affected_rows: Optional[int] = None
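
The data nodes changed below return this object in place of the old (DataFrame, columns) tuple. A minimal construction sketch, mirroring the columns_info pattern used in the following hunks (the DataFrame contents are illustrative):

    import pandas as pd

    df = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
    columns_info = [{"name": k, "type": v} for k, v in df.dtypes.items()]

    response = DataHubResponse(data_frame=df, columns=columns_info, affected_rows=0)
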
mindsdb/api/executor/datahub/datanodes/datanode.py CHANGED
@@ -1,3 +1,6 @@
+ from mindsdb.api.executor.datahub.classes.response import DataHubResponse
+
+
  class DataNode:
  type = 'meta'

@@ -10,11 +13,8 @@ class DataNode:
  def get_tables(self):
  pass

- def has_table(self, tableName):
- pass
-
  def get_table_columns(self, tableName, schema_name=None):
  pass

- def query(self, query=None, native_query=None, session=None):
+ def query(self, query=None, native_query=None, session=None) -> DataHubResponse:
  return []
mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py CHANGED
@@ -9,7 +9,7 @@ from mindsdb.api.executor import exceptions as exc
  from mindsdb.api.executor.utilities.sql import query_df
  from mindsdb.api.executor.utilities.sql import get_query_tables
  from mindsdb.interfaces.database.projects import ProjectController
-
+ from mindsdb.api.executor.datahub.classes.response import DataHubResponse
  from mindsdb.utilities import log

  from .system_tables import (
@@ -110,12 +110,6 @@ class InformationSchemaDataNode(DataNode):

  return None

- def has_table(self, tableName):
- tn = tableName.upper()
- if tn in self.tables:
- return True
- return False
-
  def get_table_columns(self, tableName, schema_name=None):
  tn = tableName.upper()
  if tn in self.tables:
@@ -143,7 +137,7 @@ class InformationSchemaDataNode(DataNode):
  if table.visible
  }

- def query(self, query: ASTNode, session=None):
+ def query(self, query: ASTNode, session=None) -> DataHubResponse:
  query_tables = [x[1] for x in get_query_tables(query)]

  if len(query_tables) != 1:
@@ -166,7 +160,11 @@ class InformationSchemaDataNode(DataNode):

  columns_info = [{"name": k, "type": v} for k, v in data.dtypes.items()]

- return data, columns_info
+ return DataHubResponse(
+ data_frame=data,
+ columns=columns_info,
+ affected_rows=0
+ )

  def _get_empty_table(self, table):
  columns = table.columns
mindsdb/api/executor/datahub/datanodes/integration_datanode.py CHANGED
@@ -10,16 +10,19 @@ from sqlalchemy.types import (
  Integer, Float, Text
  )

+ from mindsdb_sql_parser.ast.base import ASTNode
  from mindsdb_sql_parser.ast import Insert, Identifier, CreateTable, TableColumn, DropTables

+ from mindsdb.api.executor.datahub.classes.response import DataHubResponse
  from mindsdb.api.executor.datahub.datanodes.datanode import DataNode
- from mindsdb.api.executor.data_types.response_type import RESPONSE_TYPE
  from mindsdb.api.executor.datahub.classes.tables_row import TablesRow
+ from mindsdb.api.executor.data_types.response_type import RESPONSE_TYPE
  from mindsdb.api.executor.sql_query.result_set import ResultSet
  from mindsdb.integrations.utilities.utils import get_class_name
  from mindsdb.metrics import metrics
  from mindsdb.utilities import log
  from mindsdb.utilities.profiler import profiler
+ from mindsdb.integrations.libs.response import HandlerResponse

  logger = log.getLogger(__name__)

@@ -52,9 +55,6 @@ class IntegrationDataNode(DataNode):
  else:
  raise Exception(f"Can't get tables: {response.error_message}")

- def has_table(self, tableName):
- return True
-
  def get_table_columns(self, table_name: str, schema_name: Optional[str] = None):
  if 'schema_name' in inspect.signature(self.integration_handler.get_columns).parameters:
  response = self.integration_handler.get_columns(table_name, schema_name)
@@ -107,7 +107,7 @@ class IntegrationDataNode(DataNode):
  raise Exception(result.error_message)

  def create_table(self, table_name: Identifier, result_set: ResultSet = None, columns=None,
- is_replace=False, is_create=False):
+ is_replace=False, is_create=False) -> DataHubResponse:
  # is_create - create table
  # is_replace - drop table if exists
  # is_create==False and is_replace==False: just insert
@@ -164,14 +164,14 @@ class IntegrationDataNode(DataNode):

  if result_set is None:
  # it is just a 'create table'
- return
+ return DataHubResponse()

  # native insert
  if hasattr(self.integration_handler, 'insert'):
  df = result_set.to_df()

- self.integration_handler.insert(table_name.parts[-1], df)
- return
+ result: HandlerResponse = self.integration_handler.insert(table_name.parts[-1], df)
+ return DataHubResponse(affected_rows=result.affected_rows)

  insert_columns = [Identifier(parts=[x.alias]) for x in result_set.columns]

@@ -195,7 +195,7 @@ class IntegrationDataNode(DataNode):

  if len(values) == 0:
  # not need to insert
- return
+ return DataHubResponse()

  insert_ast = Insert(
  table=table_name,
@@ -213,7 +213,9 @@ class IntegrationDataNode(DataNode):
  if result.type == RESPONSE_TYPE.ERROR:
  raise Exception(result.error_message)

- def _query(self, query):
+ return DataHubResponse(affected_rows=result.affected_rows)
+
+ def _query(self, query) -> HandlerResponse:
  time_before_query = time.perf_counter()
  result = self.integration_handler.query(query)
  elapsed_seconds = time.perf_counter() - time_before_query
@@ -229,7 +231,7 @@ class IntegrationDataNode(DataNode):
  response_size_with_labels.observe(num_rows)
  return result

- def _native_query(self, native_query):
+ def _native_query(self, native_query) -> HandlerResponse:
  time_before_query = time.perf_counter()
  result = self.integration_handler.native_query(native_query)
  elapsed_seconds = time.perf_counter() - time_before_query
@@ -246,13 +248,13 @@ class IntegrationDataNode(DataNode):
  return result

  @profiler.profile()
- def query(self, query=None, native_query=None, session=None):
+ def query(self, query: Optional[ASTNode] = None, native_query: Optional[str] = None, session=None) -> DataHubResponse:
  try:
  if query is not None:
- result = self._query(query)
+ result: HandlerResponse = self._query(query)
  else:
  # try to fetch native query
- result = self._native_query(native_query)
+ result: HandlerResponse = self._native_query(native_query)
  except Exception as e:
  msg = str(e).strip()
  if msg == '':
@@ -263,7 +265,7 @@ class IntegrationDataNode(DataNode):
  if result.type == RESPONSE_TYPE.ERROR:
  raise Exception(f'Error in {self.integration_name}: {result.error_message}')
  if result.type == RESPONSE_TYPE.OK:
- return pd.DataFrame(), []
+ return DataHubResponse(affected_rows=result.affected_rows)

  df = result.data_frame
  # region clearing df from NaN values
@@ -286,4 +288,8 @@ class IntegrationDataNode(DataNode):
  for k, v in df.dtypes.items()
  ]

- return df, columns_info
+ return DataHubResponse(
+ data_frame=df,
+ columns=columns_info,
+ affected_rows=result.affected_rows
+ )
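
For callers of the data-node query() methods, the migration implied by these hunks is from tuple unpacking to attribute access. Roughly, assuming a datanode and a parsed query_ast are already available:

    # before 25.4.2.0 (old tuple return)
    # df, columns_info = datanode.query(query=query_ast, session=session)

    # from 25.4.2.0 (DataHubResponse return)
    response = datanode.query(query=query_ast, session=session)
    df = response.data_frame
    columns_info = response.columns
    affected_rows = response.affected_rows  # may be None or 0 for pure reads
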
mindsdb/api/executor/datahub/datanodes/project_datanode.py CHANGED
@@ -14,6 +14,7 @@ from mindsdb_sql_parser.ast import (
  from mindsdb.utilities.exception import EntityNotExistsError
  from mindsdb.api.executor.datahub.datanodes.datanode import DataNode
  from mindsdb.api.executor.datahub.classes.tables_row import TablesRow
+ from mindsdb.api.executor.datahub.classes.response import DataHubResponse
  from mindsdb.utilities.partitioning import process_dataframe_in_partitions


@@ -45,10 +46,6 @@ class ProjectDataNode(DataNode):
  result = [TablesRow.from_dict(row) for row in tables]
  return result

- def has_table(self, table_name):
- tables = self.project.get_tables()
- return table_name in tables
-
  def get_table_columns(self, table_name, schema_name=None):
  return [
  {'name': name}
@@ -71,7 +68,7 @@ class ProjectDataNode(DataNode):

  return ml_handler.predict(model_name, df, project_name=self.project.name, version=version, params=params)

- def query(self, query=None, native_query=None, session=None):
+ def query(self, query=None, native_query=None, session=None) -> DataHubResponse:
  if query is None and native_query is not None:
  query = parse_sql(native_query)

@@ -81,7 +78,7 @@ class ProjectDataNode(DataNode):
  if kb_table:
  # this is the knowledge db
  kb_table.update_query(query)
- return pd.DataFrame(), []
+ return DataHubResponse()

  raise NotImplementedError(f"Can't update object: {query_table}")

@@ -91,7 +88,7 @@ class ProjectDataNode(DataNode):
  if kb_table:
  # this is the knowledge db
  kb_table.delete_query(query)
- return pd.DataFrame(), []
+ return DataHubResponse()

  raise NotImplementedError(f"Can't delete object: {query_table}")

@@ -111,8 +108,7 @@ class ProjectDataNode(DataNode):
  new_query.where,
  project_filter
  ])
- df, columns_info = self.information_schema.query(new_query)
- return df, columns_info
+ return self.information_schema.query(new_query)
  # endregion

  # other table from project
@@ -121,15 +117,15 @@ class ProjectDataNode(DataNode):
  # this is the view
  df = self.project.query_view(query, session)

- columns_info = [
- {
- 'name': k,
- 'type': v
- }
- for k, v in df.dtypes.items()
- ]
+ columns_info = [{
+ 'name': k,
+ 'type': v
+ } for k, v in df.dtypes.items()]

- return df, columns_info
+ return DataHubResponse(
+ data_frame=df,
+ columns=columns_info
+ )

  kb_table = session.kb_controller.get_table(query_table, self.project.id)
  if kb_table:
@@ -143,13 +139,16 @@ class ProjectDataNode(DataNode):
  for k, v in df.dtypes.items()
  ]

- return df, columns_info
+ return DataHubResponse(
+ data_frame=df,
+ columns=columns_info
+ )

  raise EntityNotExistsError(f"Can't select from {query_table} in project")
  else:
  raise NotImplementedError(f"Query not supported {query}")

- def create_table(self, table_name: Identifier, result_set=None, is_replace=False, **kwargs):
+ def create_table(self, table_name: Identifier, result_set=None, is_replace=False, **kwargs) -> DataHubResponse:
  # is_create - create table
  # is_replace - drop table if exists
  # is_create==False and is_replace==False: just insert
@@ -165,5 +164,6 @@ class ProjectDataNode(DataNode):
  kb_table.clear()

  df = result_set.to_df()
- return kb_table.insert(df)
+ kb_table.insert(df)
+ return DataHubResponse()
  raise NotImplementedError(f"Can't create table {table_name}")
mindsdb/api/executor/planner/plan_join.py CHANGED
@@ -119,7 +119,7 @@ class PlanJoinTablesQuery:
  query2.from_table = None
  query2.using = None
  query2.cte = None
- sup_select = QueryStep(query2, from_table=join_step.result)
+ sup_select = QueryStep(query2, from_table=join_step.result, strict_where=False)
  self.planner.plan.add_step(sup_select)
  return sup_select
  return join_step