MindsDB 25.9.3rc1__py3-none-any.whl → 25.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of MindsDB might be problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/__main__.py +1 -9
- mindsdb/api/a2a/__init__.py +1 -1
- mindsdb/api/a2a/agent.py +9 -1
- mindsdb/api/a2a/common/server/server.py +4 -0
- mindsdb/api/a2a/common/server/task_manager.py +8 -1
- mindsdb/api/a2a/common/types.py +66 -0
- mindsdb/api/a2a/task_manager.py +50 -0
- mindsdb/api/common/middleware.py +1 -1
- mindsdb/api/executor/command_executor.py +49 -36
- mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py +7 -13
- mindsdb/api/executor/datahub/datanodes/integration_datanode.py +2 -2
- mindsdb/api/executor/datahub/datanodes/system_tables.py +2 -1
- mindsdb/api/executor/planner/query_prepare.py +2 -20
- mindsdb/api/executor/utilities/sql.py +5 -4
- mindsdb/api/http/initialize.py +76 -60
- mindsdb/api/http/namespaces/agents.py +0 -3
- mindsdb/api/http/namespaces/chatbots.py +0 -5
- mindsdb/api/http/namespaces/file.py +2 -0
- mindsdb/api/http/namespaces/handlers.py +10 -5
- mindsdb/api/http/namespaces/knowledge_bases.py +20 -0
- mindsdb/api/http/namespaces/sql.py +2 -2
- mindsdb/api/http/start.py +2 -2
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +8 -2
- mindsdb/integrations/handlers/byom_handler/byom_handler.py +2 -10
- mindsdb/integrations/handlers/databricks_handler/databricks_handler.py +98 -46
- mindsdb/integrations/handlers/druid_handler/druid_handler.py +32 -40
- mindsdb/integrations/handlers/gitlab_handler/gitlab_handler.py +5 -2
- mindsdb/integrations/handlers/mssql_handler/mssql_handler.py +438 -100
- mindsdb/integrations/handlers/mssql_handler/requirements_odbc.txt +3 -0
- mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +235 -3
- mindsdb/integrations/handlers/oracle_handler/__init__.py +2 -0
- mindsdb/integrations/handlers/oracle_handler/connection_args.py +7 -1
- mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +321 -16
- mindsdb/integrations/handlers/oracle_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +2 -2
- mindsdb/integrations/handlers/shopify_handler/requirements.txt +1 -0
- mindsdb/integrations/handlers/shopify_handler/shopify_handler.py +57 -3
- mindsdb/integrations/handlers/zendesk_handler/zendesk_tables.py +144 -111
- mindsdb/integrations/libs/response.py +2 -2
- mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/__init__.py +1 -0
- mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/snowflake_jwt_gen.py +151 -0
- mindsdb/integrations/utilities/rag/rerankers/base_reranker.py +24 -21
- mindsdb/interfaces/agents/agents_controller.py +0 -2
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +6 -7
- mindsdb/interfaces/data_catalog/data_catalog_reader.py +15 -4
- mindsdb/interfaces/database/data_handlers_cache.py +190 -0
- mindsdb/interfaces/database/database.py +3 -3
- mindsdb/interfaces/database/integrations.py +1 -121
- mindsdb/interfaces/database/projects.py +2 -6
- mindsdb/interfaces/database/views.py +1 -4
- mindsdb/interfaces/jobs/jobs_controller.py +0 -4
- mindsdb/interfaces/jobs/scheduler.py +0 -1
- mindsdb/interfaces/knowledge_base/controller.py +197 -108
- mindsdb/interfaces/knowledge_base/evaluate.py +36 -41
- mindsdb/interfaces/knowledge_base/executor.py +11 -0
- mindsdb/interfaces/knowledge_base/llm_client.py +51 -17
- mindsdb/interfaces/model/model_controller.py +4 -4
- mindsdb/interfaces/skills/custom/text2sql/mindsdb_sql_toolkit.py +4 -10
- mindsdb/interfaces/skills/skills_controller.py +1 -4
- mindsdb/interfaces/storage/db.py +16 -6
- mindsdb/interfaces/triggers/triggers_controller.py +1 -3
- mindsdb/utilities/config.py +19 -2
- mindsdb/utilities/exception.py +2 -2
- mindsdb/utilities/json_encoder.py +24 -10
- mindsdb/utilities/render/sqlalchemy_render.py +15 -14
- mindsdb/utilities/starters.py +0 -10
- {mindsdb-25.9.3rc1.dist-info → mindsdb-25.10.0.dist-info}/METADATA +278 -264
- {mindsdb-25.9.3rc1.dist-info → mindsdb-25.10.0.dist-info}/RECORD +72 -86
- mindsdb/api/postgres/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/executor/__init__.py +0 -1
- mindsdb/api/postgres/postgres_proxy/executor/executor.py +0 -182
- mindsdb/api/postgres/postgres_proxy/postgres_packets/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/postgres_packets/errors.py +0 -322
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_fields.py +0 -34
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message.py +0 -31
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_formats.py +0 -1265
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_identifiers.py +0 -31
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_packets.py +0 -265
- mindsdb/api/postgres/postgres_proxy/postgres_proxy.py +0 -477
- mindsdb/api/postgres/postgres_proxy/utilities/__init__.py +0 -10
- mindsdb/api/postgres/start.py +0 -11
- mindsdb/integrations/handlers/mssql_handler/tests/__init__.py +0 -0
- mindsdb/integrations/handlers/mssql_handler/tests/test_mssql_handler.py +0 -169
- mindsdb/integrations/handlers/oracle_handler/tests/__init__.py +0 -0
- mindsdb/integrations/handlers/oracle_handler/tests/test_oracle_handler.py +0 -32
- {mindsdb-25.9.3rc1.dist-info → mindsdb-25.10.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.9.3rc1.dist-info → mindsdb-25.10.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.9.3rc1.dist-info → mindsdb-25.10.0.dist-info}/top_level.txt +0 -0
mindsdb/__about__.py
CHANGED
@@ -1,6 +1,6 @@
 __title__ = "MindsDB"
 __package_name__ = "mindsdb"
-__version__ = "25.9.3rc1"
+__version__ = "25.10.0"
 __description__ = "MindsDB's AI SQL Server enables developers to build AI tools that need access to real-time data to perform their tasks"
 __email__ = "jorge@mindsdb.com"
 __author__ = "MindsDB Inc"
mindsdb/__main__.py
CHANGED
@@ -27,7 +27,6 @@ from mindsdb.utilities.config import config
 from mindsdb.utilities.starters import (
     start_http,
     start_mysql,
-    start_postgres,
     start_ml_task_queue,
     start_scheduler,
     start_tasks,
@@ -58,7 +57,6 @@ _stop_event = threading.Event()
 class TrunkProcessEnum(Enum):
     HTTP = "http"
     MYSQL = "mysql"
-    POSTGRES = "postgres"
     JOBS = "jobs"
     TASKS = "tasks"
     ML_TASK_QUEUE = "ml_task_queue"
@@ -447,12 +445,6 @@ if __name__ == "__main__":
                 "max_restart_interval_seconds", TrunkProcessData.max_restart_interval_seconds
             ),
         ),
-        TrunkProcessEnum.POSTGRES: TrunkProcessData(
-            name=TrunkProcessEnum.POSTGRES.value,
-            entrypoint=start_postgres,
-            port=config["api"]["postgres"]["port"],
-            args=(config.cmd_args.verbose,),
-        ),
         TrunkProcessEnum.JOBS: TrunkProcessData(
             name=TrunkProcessEnum.JOBS.value, entrypoint=start_scheduler, args=(config.cmd_args.verbose,)
         ),
@@ -550,7 +542,7 @@ if __name__ == "__main__":
             trunc_process_data.process = None
             if trunc_process_data.name == TrunkProcessEnum.HTTP.value:
                 # do not open GUI on HTTP API restart
-                trunc_process_data.args = (config.cmd_args.verbose, True)
+                trunc_process_data.args = (config.cmd_args.verbose, None, True)
             start_process(trunc_process_data)
             api_name, port, started = await wait_api_start(
                 trunc_process_data.name,
mindsdb/api/a2a/__init__.py
CHANGED
@@ -33,7 +33,7 @@ def get_a2a_app(
     agent_card = AgentCard(
         name="MindsDB Agent Connector",
         description=(f"A2A connector that proxies requests to MindsDB agents in project '{project_name}'."),
-        url=f"http://127.0.0.1:{mindsdb_port}",
+        url=f"http://127.0.0.1:{mindsdb_port}/a2a/",
         version="1.0.0",
         defaultInputModes=MindsDBAgent.SUPPORTED_CONTENT_TYPES,
         defaultOutputModes=MindsDBAgent.SUPPORTED_CONTENT_TYPES,
mindsdb/api/a2a/agent.py
CHANGED
@@ -24,7 +24,15 @@ class MindsDBAgent:
         self.agent_name = agent_name
         self.project_name = project_name
         port = config.get("api", {}).get("http", {}).get("port", 47334)
-
+        host = config.get("api", {}).get("http", {}).get("host", "127.0.0.1")
+
+        # Use 127.0.0.1 instead of localhost for better compatibility
+        if host in ("0.0.0.0", ""):
+            url = f"http://127.0.0.1:{port}/"
+        else:
+            url = f"http://{host}:{port}/"
+
+        self.base_url = url
         self.agent_url = f"{self.base_url}/api/projects/{project_name}/agents/{agent_name}"
         self.sql_url = f"{self.base_url}/api/sql/query"
         self.headers = {k: v for k, v in user_info.items() if v is not None} or {}
mindsdb/api/a2a/common/server/server.py
CHANGED
@@ -22,6 +22,7 @@ from ...common.types import (
     AgentCard,
     TaskResubscriptionRequest,
     SendTaskStreamingRequest,
+    MessageStreamRequest,
 )
 from pydantic import ValidationError
 from ...common.server.task_manager import TaskManager
@@ -43,6 +44,7 @@ class A2AServer:
             routes=[
                 Route("/", self._process_request, methods=["POST"]),
                 Route("/.well-known/agent.json", self._get_agent_card, methods=["GET"]),
+                Route("/.well-known/agent-card.json", self._get_agent_card, methods=["GET"]),
                 Route("/status", self._get_status, methods=["GET"]),
             ]
         )
@@ -103,6 +105,8 @@ class A2AServer:
             result = await self.task_manager.on_get_task_push_notification(json_rpc_request)
         elif isinstance(json_rpc_request, TaskResubscriptionRequest):
             result = await self.task_manager.on_resubscribe_to_task(json_rpc_request)
+        elif isinstance(json_rpc_request, MessageStreamRequest):
+            result = await self.task_manager.on_message_stream(json_rpc_request, user_info)
         else:
             logger.warning(f"Unexpected request type: {type(json_rpc_request)}")
             raise ValueError(f"Unexpected request type: {type(request)}")
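Note: with this change the agent card is served at both discovery paths. A minimal local check, assuming the A2A app is reachable under /a2a on the default HTTP port 47334 (illustrative only, not part of the diff):

    import requests

    # Both well-known paths should now return the same agent card (assumed local defaults).
    base = "http://127.0.0.1:47334/a2a"
    legacy = requests.get(f"{base}/.well-known/agent.json", timeout=5).json()
    standard = requests.get(f"{base}/.well-known/agent-card.json", timeout=5).json()
    assert legacy["name"] == standard["name"] == "MindsDB Agent Connector"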
mindsdb/api/a2a/common/server/task_manager.py
CHANGED
@@ -1,7 +1,7 @@
 from abc import ABC, abstractmethod
 from typing import Union, AsyncIterable, List, Dict
-from ...common.types import Task
 from ...common.types import (
+    Task,
     JSONRPCResponse,
     TaskIdParams,
     TaskQueryParams,
@@ -29,6 +29,7 @@ from ...common.types import (
     JSONRPCError,
     TaskPushNotificationConfig,
     InternalError,
+    MessageStreamRequest,
 )
 from ...common.server.utils import new_not_implemented_error
 from mindsdb.utilities import log
@@ -74,6 +75,12 @@ class TaskManager(ABC):
     ) -> Union[AsyncIterable[SendTaskResponse], JSONRPCResponse]:
         pass
 
+    @abstractmethod
+    async def on_message_stream(
+        self, request: MessageStreamRequest, user_info: Dict
+    ) -> Union[AsyncIterable[SendTaskStreamingResponse], JSONRPCResponse]:
+        pass
+
 
 class InMemoryTaskManager(TaskManager):
     def __init__(self):
mindsdb/api/a2a/common/types.py
CHANGED
@@ -59,6 +59,47 @@ class Message(BaseModel):
     parts: List[Part]
     metadata: dict[str, Any] | None = None
     history: Optional[List["Message"]] = None
+    messageId: str | None = None
+
+
+class FlexibleMessage(BaseModel):
+    """Message that can handle both 'type' and 'kind' in parts."""
+
+    role: Literal["user", "agent", "assistant"]
+    parts: List[dict[str, Any]]  # Raw parts that we'll process manually
+    metadata: dict[str, Any] | None = None
+    history: Optional[List["FlexibleMessage"]] = None
+
+    @model_validator(mode="after")
+    def normalize_parts(self):
+        """Convert parts with 'kind' to parts with 'type'."""
+        normalized_parts = []
+        for part in self.parts:
+            if isinstance(part, dict):
+                # Convert 'kind' to 'type' if needed
+                if "kind" in part and "type" not in part:
+                    normalized_part = part.copy()
+                    normalized_part["type"] = normalized_part.pop("kind")
+                else:
+                    normalized_part = part
+
+                # Validate the normalized part
+                try:
+                    if normalized_part.get("type") == "text":
+                        normalized_parts.append(TextPart.model_validate(normalized_part))
+                    elif normalized_part.get("type") == "file":
+                        normalized_parts.append(FilePart.model_validate(normalized_part))
+                    elif normalized_part.get("type") == "data":
+                        normalized_parts.append(DataPart.model_validate(normalized_part))
+                    else:
+                        raise ValueError(f"Unknown part type: {normalized_part.get('type')}")
+                except Exception as e:
+                    raise ValueError(f"Invalid part: {normalized_part}, error: {e}")
+            else:
+                normalized_parts.append(part)
+
+        self.parts = normalized_parts
+        return self
 
 
 class TaskStatus(BaseModel):
@@ -88,6 +129,7 @@ class Task(BaseModel):
     artifacts: List[Artifact] | None = None
     history: List[Message] | None = None
     metadata: dict[str, Any] | None = None
+    contextId: str | None = None
 
 
 class TaskStatusUpdateEvent(BaseModel):
@@ -95,12 +137,16 @@ class TaskStatusUpdateEvent(BaseModel):
     status: TaskStatus
     final: bool = False
     metadata: dict[str, Any] | None = None
+    contextId: str | None = None
+    taskId: str | None = None
 
 
 class TaskArtifactUpdateEvent(BaseModel):
     id: str
     artifact: Artifact
     metadata: dict[str, Any] | None = None
+    contextId: str | None = None
+    taskId: str | None = None
 
 
 class AuthenticationInfo(BaseModel):
@@ -182,6 +228,25 @@ class SendTaskStreamingResponse(JSONRPCResponse):
     result: TaskStatusUpdateEvent | TaskArtifactUpdateEvent | None = None
 
 
+class MessageStreamParams(BaseModel):
+    sessionId: str = Field(default_factory=lambda: uuid4().hex)
+    message: FlexibleMessage
+    metadata: dict[str, Any] | None = None
+
+
+class MessageStreamRequest(JSONRPCRequest):
+    method: Literal["message/stream"] = "message/stream"
+    params: MessageStreamParams
+
+
+class MessageStreamResponse(JSONRPCResponse):
+    result: Message | None = None
+
+
+class SendStreamingMessageSuccessResponse(JSONRPCResponse):
+    result: Union[Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent] | None = None
+
+
 class GetTaskRequest(JSONRPCRequest):
     method: Literal["tasks/get"] = "tasks/get"
     params: TaskQueryParams
@@ -233,6 +298,7 @@ A2ARequest = TypeAdapter(
         GetTaskPushNotificationRequest,
         TaskResubscriptionRequest,
         SendTaskStreamingRequest,
+        MessageStreamRequest,
     ],
     Field(discriminator="method"),
]
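Note: a minimal sketch of what the new FlexibleMessage validator does, assuming the module is importable as in the diff; parts sent with the A2A-style "kind" key are normalized into the internal "type"-keyed Part models (illustrative only):

    from mindsdb.api.a2a.common.types import FlexibleMessage

    # A part keyed with "kind" (A2A wire format) is converted to a typed TextPart.
    msg = FlexibleMessage(
        role="user",
        parts=[{"kind": "text", "text": "How many rows does my_table have?"}],
    )
    part = msg.parts[0]
    print(type(part).__name__, part.text)  # -> TextPart How many rows does my_table have?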
mindsdb/api/a2a/task_manager.py
CHANGED
@@ -19,10 +19,13 @@ from mindsdb.api.a2a.common.types import (
     SendTaskStreamingRequest,
     SendTaskStreamingResponse,
     InvalidRequestError,
+    MessageStreamRequest,
+    SendStreamingMessageSuccessResponse,
 )
 from mindsdb.api.a2a.common.server.task_manager import InMemoryTaskManager
 from mindsdb.api.a2a.agent import MindsDBAgent
 from mindsdb.api.a2a.utils import to_serializable, convert_a2a_message_to_qa_format
+from mindsdb.interfaces.agents.agents_controller import AgentsController
 
 
 logger = logging.getLogger(__name__)
@@ -472,3 +475,50 @@ class AgentTaskManager(InMemoryTaskManager):
             [Artifact(parts=parts)],
         )
         return to_serializable(SendTaskResponse(id=request.id, result=task))
+
+    async def on_message_stream(
+        self, request: MessageStreamRequest, user_info: Dict
+    ) -> Union[AsyncIterable[SendStreamingMessageSuccessResponse], JSONRPCResponse]:
+        """
+        Handle message streaming requests.
+        """
+        logger.info(f"Processing message stream request for session {request.params.sessionId}")
+
+        query = self._get_user_query(request.params)
+        params = self._get_task_params(request.params)
+
+        try:
+            task_id = f"msg_stream_{request.params.sessionId}_{request.id}"
+            context_id = f"ctx_{request.params.sessionId}"
+            message_id = f"msg_{request.id}"
+
+            agents_controller = AgentsController()
+            existing_agent = agents_controller.get_agent(params["agent_name"])
+            resp = agents_controller.get_completion(existing_agent, [{"question": query}])
+            response_message = resp["answer"][0]
+
+            response_message = Message(
+                role="agent", parts=[{"type": "text", "text": response_message}], metadata={}, messageId=message_id
+            )
+
+            task_status = TaskStatus(state=TaskState.COMPLETED, message=response_message)
+
+            task_status_update = TaskStatusUpdateEvent(
+                id=task_id,
+                status=task_status,
+                final=True,
+                metadata={"message_stream": True},
+                contextId=context_id,
+                taskId=task_id,
+            )
+
+            async def message_stream_generator():
+                yield to_serializable(SendStreamingMessageSuccessResponse(id=request.id, result=task_status_update))
+
+            return message_stream_generator()
+
+        except Exception as e:
+            logger.error(f"Error processing message stream: {e}")
+            return SendStreamingMessageSuccessResponse(
+                id=request.id, error=InternalError(message=f"Error processing message stream: {str(e)}")
+            )
mindsdb/api/common/middleware.py
CHANGED
@@ -75,7 +75,7 @@ class PATAuthMiddleware(BaseHTTPMiddleware):
         return await call_next(request)
 
 
-# Used by mysql
+# Used by mysql protocol
 def check_auth(username, password, scramble_func, salt, company_id, config):
     try:
         hardcoded_user = config["auth"].get("username")
mindsdb/api/executor/command_executor.py
CHANGED
@@ -49,6 +49,7 @@ from mindsdb_sql_parser.ast.mindsdb import (
     CreateDatabase,
     CreateJob,
     CreateKnowledgeBase,
+    AlterKnowledgeBase,
     CreateMLEngine,
     CreatePredictor,
     CreateSkill,
@@ -196,11 +197,13 @@ def match_two_part_name(
         ValueError: If the identifier does not contain one or two parts, or if ensure_lower_case is True and the name is not lowercase.
     """
     db_name = None
+
     match identifier.parts, identifier.is_quoted:
         case [name], [is_quoted]:
             ...
-        case [db_name, name], [
-
+        case [db_name, name], [db_is_quoted, is_quoted]:
+            if not db_is_quoted:
+                db_name = db_name.lower()
         case _:
             raise ValueError(f"Only single-part or two-part names are allowed: {identifier}")
     if not is_quoted:
@@ -655,6 +658,8 @@ class ExecuteCommands:
             return self.answer_drop_chatbot(statement, database_name)
         elif statement_type is CreateKnowledgeBase:
             return self.answer_create_kb(statement, database_name)
+        elif statement_type is AlterKnowledgeBase:
+            return self.answer_alter_kb(statement, database_name)
         elif statement_type is DropKnowledgeBase:
             return self.answer_drop_kb(statement, database_name)
         elif statement_type is CreateSkill:
@@ -710,9 +715,7 @@
 
     def answer_create_trigger(self, statement, database_name):
         triggers_controller = TriggersController()
-        project_name, trigger_name = match_two_part_name(
-            statement.name, ensure_lower_case=True, default_db_name=database_name
-        )
+        project_name, trigger_name = match_two_part_name(statement.name, default_db_name=database_name)
 
         triggers_controller.add(
             trigger_name,
@@ -726,9 +729,7 @@
     def answer_drop_trigger(self, statement, database_name):
         triggers_controller = TriggersController()
 
-
-        trigger_name = statement.name.parts[-1]
-        project_name = name.parts[-2] if len(name.parts) > 1 else database_name
+        project_name, trigger_name = match_two_part_name(statement.name, default_db_name=database_name)
 
         triggers_controller.delete(trigger_name, project_name)
 
@@ -736,9 +737,7 @@
 
     def answer_create_job(self, statement: CreateJob, database_name):
         jobs_controller = JobsController()
-        project_name, job_name = match_two_part_name(
-            statement.name, ensure_lower_case=True, default_db_name=database_name
-        )
+        project_name, job_name = match_two_part_name(statement.name, default_db_name=database_name)
 
         try:
             jobs_controller.create(job_name, project_name, statement)
@@ -762,7 +761,7 @@
 
     def answer_create_chatbot(self, statement, database_name):
         chatbot_controller = ChatBotController()
-        project_name, name = match_two_part_name(statement.name,
+        project_name, name = match_two_part_name(statement.name, default_db_name=database_name)
 
         is_running = statement.params.pop("is_running", True)
 
@@ -794,9 +793,7 @@
     def answer_update_chatbot(self, statement, database_name):
         chatbot_controller = ChatBotController()
 
-        name = statement.name
-        name_no_project = name.parts[-1]
-        project_name = name.parts[-2] if len(name.parts) > 1 else database_name
+        project_name, name = match_two_part_name(statement.name, default_db_name=database_name)
 
         # From SET keyword parameters
         updated_name = statement.params.pop("name", None)
@@ -813,7 +810,7 @@
             database_id = database["id"]
 
         updated_chatbot = chatbot_controller.update_chatbot(
-
+            name,
             project_name=project_name,
             name=updated_name,
            model_name=model_name,
@@ -823,16 +820,15 @@
             params=statement.params,
         )
         if updated_chatbot is None:
-            raise ExecutorException(f"Chatbot with name {
+            raise ExecutorException(f"Chatbot with name {name} not found")
         return ExecuteAnswer()
 
     def answer_drop_chatbot(self, statement, database_name):
         chatbot_controller = ChatBotController()
 
-        name = statement.name
-        project_name = name.parts[-2] if len(name.parts) > 1 else database_name
+        project_name, name = match_two_part_name(statement.name, default_db_name=database_name)
 
-        chatbot_controller.delete_chatbot(name
+        chatbot_controller.delete_chatbot(name, project_name=project_name)
         return ExecuteAnswer()
 
     def answer_evaluate_metric(self, statement, database_name):
@@ -972,6 +968,10 @@
             identifier.is_quoted = [False] + identifier.is_quoted
 
         database_name, model_name, model_version = resolve_model_identifier(identifier)
+        # at least two part in identifier
+        identifier.parts[0] = database_name
+        identifier.parts[1] = model_name
+
         if database_name is None:
             database_name = database_name
 
@@ -1159,7 +1159,7 @@
         Raises:
             ValueError: If the ml_engine name format is invalid.
         """
-        name = match_one_part_name(statement.name
+        name = match_one_part_name(statement.name)
 
         handler = statement.handler
         params = statement.params
@@ -1245,7 +1245,7 @@
         Returns:
             ExecuteAnswer: 'ok' answer
         """
-        database_name = match_one_part_name(statement.name
+        database_name = match_one_part_name(statement.name)
 
         engine = (statement.engine or "mindsdb").lower()
 
@@ -1334,9 +1334,7 @@
         Returns:
             ExecuteAnswer: answer for the command
         """
-        project_name, view_name = match_two_part_name(
-            statement.name, default_db_name=database_name, ensure_lower_case=isinstance(statement, CreateView)
-        )
+        project_name, view_name = match_two_part_name(statement.name, default_db_name=database_name)
 
         query_str = statement.query_str
 
@@ -1388,10 +1386,15 @@
             case _:
                 raise ValueError(f"Invalid view name: {name}")
 
+        if not db_name_quoted:
+            database_name = database_name.lower()
+        if not view_name_quoted:
+            view_name = view_name.lower()
+
         project = self.session.database_controller.get_project(database_name, db_name_quoted)
 
         try:
-            project.drop_view(view_name, strict_case=
+            project.drop_view(view_name, strict_case=True)
         except EntityNotExistsError:
             if statement.if_exists is not True:
                 raise
@@ -1405,9 +1408,7 @@
                 "Please pass the model parameters as a JSON object in the embedding_model field."
             )
 
-        project_name, kb_name = match_two_part_name(
-            statement.name, ensure_lower_case=True, default_db_name=database_name
-        )
+        project_name, kb_name = match_two_part_name(statement.name, default_db_name=database_name)
 
         if statement.storage is not None:
             if len(statement.storage.parts) != 2:
@@ -1431,6 +1432,20 @@
 
         return ExecuteAnswer()
 
+    def answer_alter_kb(self, statement: AlterKnowledgeBase, database_name: str):
+        project_name, kb_name = match_two_part_name(
+            statement.name, ensure_lower_case=True, default_db_name=database_name
+        )
+
+        # update the knowledge base
+        self.session.kb_controller.update(
+            name=kb_name,
+            project_name=project_name,
+            params=statement.params,
+        )
+
+        return ExecuteAnswer()
+
     def answer_drop_kb(self, statement: DropKnowledgeBase, database_name: str) -> ExecuteAnswer:
         project_name, kb_name = match_two_part_name(statement.name, default_db_name=database_name)
 
@@ -1444,7 +1459,7 @@
         return ExecuteAnswer()
 
     def answer_create_skill(self, statement, database_name):
-        project_name, name = match_two_part_name(statement.name,
+        project_name, name = match_two_part_name(statement.name, default_db_name=database_name)
 
         try:
             _ = self.session.skills_controller.add_skill(name, project_name, statement.type, statement.params)
@@ -1480,7 +1495,7 @@
         return ExecuteAnswer()
 
     def answer_create_agent(self, statement, database_name):
-        project_name, name = match_two_part_name(statement.name,
+        project_name, name = match_two_part_name(statement.name, default_db_name=database_name)
 
         skills = statement.params.pop("skills", [])
         provider = statement.params.pop("provider", None)
@@ -1536,11 +1551,9 @@
 
     @mark_process("learn")
     def answer_create_predictor(self, statement: CreatePredictor, database_name: str):
-        integration_name, model_name = match_two_part_name(
-            statement.name, ensure_lower_case=True, default_db_name=database_name
-        )
+        integration_name, model_name = match_two_part_name(statement.name, default_db_name=database_name)
 
-        statement.name.parts = [integration_name
+        statement.name.parts = [integration_name, model_name]
         statement.name.is_quoted = [False, False]
 
         ml_integration_name = "lightwood"  # default
@@ -2016,7 +2029,7 @@
         else:
             # drop model
             try:
-                project = self.session.database_controller.get_project(project_name)
+                project = self.session.database_controller.get_project(project_name, strict_case=True)
                 project.drop_model(model_name)
             except Exception as e:
                 if not statement.if_exists:
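Note: the recurring change in this file is that match_two_part_name no longer forces ensure_lower_case; case handling now follows quoting. A rough sketch of the resulting behavior, using the import paths as they appear in the diff and hypothetical names (treat the expected output as illustrative):

    from mindsdb_sql_parser.ast import Identifier
    from mindsdb.api.executor.command_executor import match_two_part_name

    # Unquoted parts are lower-cased, quoted parts keep their case (hypothetical names).
    ident = Identifier(parts=["MyProject", "MyTrigger"])
    ident.is_quoted = [False, True]
    project, trigger = match_two_part_name(ident, default_db_name="mindsdb")
    print(project, trigger)  # expected: myproject MyTrigger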
mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py
CHANGED
@@ -94,17 +94,7 @@ class InformationSchemaDataNode(DataNode):
         self.integration_controller = session.integration_controller
         self.project_controller = ProjectController()
         self.database_controller = session.database_controller
-
-        self.persis_datanodes = {"log": self.database_controller.logs_db_controller}
-
-        databases = self.database_controller.get_dict()
-        if "files" in databases:
-            self.persis_datanodes["files"] = IntegrationDataNode(
-                "files",
-                ds_type="file",
-                integration_controller=self.session.integration_controller,
-            )
-
+        self.persist_datanodes_names = ("log", "files")
         self.tables = {t.name: t for t in self.tables_list}
 
     def __getitem__(self, key):
@@ -119,8 +109,12 @@ class InformationSchemaDataNode(DataNode):
         if name_lower == "log":
             return self.database_controller.get_system_db("log")
 
-        if name_lower
-            return
+        if name_lower == "files":
+            return IntegrationDataNode(
+                "files",
+                ds_type="file",
+                integration_controller=self.session.integration_controller,
+            )
 
         existing_databases_meta = self.database_controller.get_dict()  # filter_type='project'
         database_name = None
mindsdb/api/executor/datahub/datanodes/integration_datanode.py
CHANGED
@@ -124,7 +124,7 @@ class IntegrationDataNode(DataNode):
             df.columns = [name.upper() for name in df.columns]
             if "FIELD" not in df.columns or "TYPE" not in df.columns:
                 logger.warning(
-                    f"Response from the handler's `get_columns` call does not contain required columns:
+                    f"Response from the handler's `get_columns` call does not contain required columns: {list(df.columns)}"
                 )
                 return pd.DataFrame([], columns=astuple(INF_SCHEMA_COLUMNS_NAMES))
 
@@ -294,7 +294,7 @@ class IntegrationDataNode(DataNode):
                 db_type=self.integration_handler.__class__.name,
                 db_error_msg=result.error_message,
                 failed_query=query_str,
-
+                is_expected=result.is_expected_error,
             )
 
         if result.exception is None:
mindsdb/api/executor/datahub/datanodes/system_tables.py
CHANGED
@@ -102,9 +102,10 @@ class TablesTable(Table):
             row = TablesRow(TABLE_TYPE=TABLES_ROW_TYPE.SYSTEM_VIEW, TABLE_NAME=name)
             data.append(row.to_list())
 
-        for ds_name
+        for ds_name in inf_schema.persist_datanodes_names:
             if databases is not None and ds_name not in databases:
                 continue
+            ds = inf_schema.get(ds_name)
 
             if hasattr(ds, "get_tables_rows"):
                 ds_tables = ds.get_tables_rows()
mindsdb/api/executor/planner/query_prepare.py
CHANGED
@@ -286,26 +286,8 @@ class PreparedStatementPlanner:
 
         if step.result_data is not None:
             # save results
-
-
-            table_info = step.result_data["tables"][0]
-            columns_info = step.result_data["columns"][table_info]
-
-            table.columns = []
-            table.ds = table_info[0]
-            for col in columns_info:
-                if isinstance(col, tuple):
-                    # is predictor
-                    col = dict(name=col[0], type="str")
-                table.columns.append(
-                    Column(
-                        name=col["name"],
-                        type=col["type"],
-                    )
-                )
-
-            # map by names
-            table.columns_map = {i.name.upper(): i for i in table.columns}
+            table.columns = step.result_data.columns
+            table.columns_map = {column.name.upper(): column for column in step.result_data.columns}
 
         # === create columns list ===
         columns_result = []
mindsdb/api/executor/utilities/sql.py
CHANGED
@@ -4,6 +4,7 @@ from typing import List
 import duckdb
 from duckdb import InvalidInputException
 import numpy as np
+import orjson
 
 from mindsdb_sql_parser import parse_sql
 from mindsdb_sql_parser.ast import ASTNode, Select, Identifier, Function, Constant
@@ -244,13 +245,13 @@ def query_df(df, query, session=None):
 
     query_traversal(query_ast, adapt_query)
 
-    # convert json columns
-    encoder = CustomJSONEncoder()
-
     def _convert(v):
         if isinstance(v, dict) or isinstance(v, list):
             try:
-
+                default_encoder = CustomJSONEncoder().default
+                return orjson.dumps(
+                    v, default=default_encoder, option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_PASSTHROUGH_DATETIME
+                ).decode("utf-8")
             except Exception:
                 pass
             return v
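Note: dict and list cells are now serialized with orjson instead of the stdlib encoder. A small standalone sketch of the same call pattern, with a hypothetical default function standing in for CustomJSONEncoder().default:

    from datetime import datetime

    import numpy as np
    import orjson

    def _default(obj):
        # Stand-in for CustomJSONEncoder().default: handle types orjson passes through.
        if isinstance(obj, datetime):
            return obj.isoformat()
        raise TypeError(f"unsupported type: {type(obj)}")

    value = {"vector": np.array([1, 2, 3]), "ts": datetime(2025, 10, 1, 12, 0)}
    print(
        orjson.dumps(
            value,
            default=_default,
            option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_PASSTHROUGH_DATETIME,
        ).decode("utf-8")
    )
    # {"vector":[1,2,3],"ts":"2025-10-01T12:00:00"}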