MindsDB 25.7.3.0-py3-none-any.whl → 25.8.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of MindsDB might be problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/__main__.py +11 -1
- mindsdb/api/a2a/common/server/server.py +16 -6
- mindsdb/api/executor/command_executor.py +215 -150
- mindsdb/api/executor/datahub/datanodes/project_datanode.py +14 -3
- mindsdb/api/executor/planner/plan_join.py +3 -0
- mindsdb/api/executor/planner/plan_join_ts.py +117 -100
- mindsdb/api/executor/planner/query_planner.py +1 -0
- mindsdb/api/executor/sql_query/steps/apply_predictor_step.py +54 -85
- mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +21 -24
- mindsdb/api/executor/sql_query/steps/fetch_dataframe_partition.py +9 -3
- mindsdb/api/executor/sql_query/steps/subselect_step.py +11 -8
- mindsdb/api/executor/utilities/mysql_to_duckdb_functions.py +264 -0
- mindsdb/api/executor/utilities/sql.py +30 -0
- mindsdb/api/http/initialize.py +18 -44
- mindsdb/api/http/namespaces/agents.py +23 -20
- mindsdb/api/http/namespaces/chatbots.py +83 -120
- mindsdb/api/http/namespaces/file.py +1 -1
- mindsdb/api/http/namespaces/jobs.py +38 -60
- mindsdb/api/http/namespaces/tree.py +69 -61
- mindsdb/api/http/namespaces/views.py +56 -72
- mindsdb/api/mcp/start.py +2 -0
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +3 -2
- mindsdb/integrations/handlers/autogluon_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/autosklearn_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py +25 -5
- mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py +3 -3
- mindsdb/integrations/handlers/db2_handler/db2_handler.py +19 -23
- mindsdb/integrations/handlers/flaml_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/gong_handler/__about__.py +2 -0
- mindsdb/integrations/handlers/gong_handler/__init__.py +30 -0
- mindsdb/integrations/handlers/gong_handler/connection_args.py +37 -0
- mindsdb/integrations/handlers/gong_handler/gong_handler.py +164 -0
- mindsdb/integrations/handlers/gong_handler/gong_tables.py +508 -0
- mindsdb/integrations/handlers/gong_handler/icon.svg +25 -0
- mindsdb/integrations/handlers/gong_handler/test_gong_handler.py +125 -0
- mindsdb/integrations/handlers/google_calendar_handler/google_calendar_tables.py +82 -73
- mindsdb/integrations/handlers/hubspot_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/huggingface_handler/__init__.py +8 -12
- mindsdb/integrations/handlers/huggingface_handler/finetune.py +203 -223
- mindsdb/integrations/handlers/huggingface_handler/huggingface_handler.py +360 -383
- mindsdb/integrations/handlers/huggingface_handler/requirements.txt +7 -7
- mindsdb/integrations/handlers/huggingface_handler/requirements_cpu.txt +7 -7
- mindsdb/integrations/handlers/huggingface_handler/settings.py +25 -25
- mindsdb/integrations/handlers/langchain_handler/langchain_handler.py +83 -77
- mindsdb/integrations/handlers/lightwood_handler/requirements.txt +4 -4
- mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +5 -2
- mindsdb/integrations/handlers/litellm_handler/settings.py +2 -1
- mindsdb/integrations/handlers/openai_handler/constants.py +11 -30
- mindsdb/integrations/handlers/openai_handler/helpers.py +27 -34
- mindsdb/integrations/handlers/openai_handler/openai_handler.py +14 -12
- mindsdb/integrations/handlers/pgvector_handler/pgvector_handler.py +106 -90
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +41 -39
- mindsdb/integrations/handlers/salesforce_handler/constants.py +215 -0
- mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +141 -80
- mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +0 -1
- mindsdb/integrations/handlers/tpot_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py +32 -17
- mindsdb/integrations/handlers/web_handler/web_handler.py +19 -22
- mindsdb/integrations/libs/llm/config.py +0 -14
- mindsdb/integrations/libs/llm/utils.py +0 -15
- mindsdb/integrations/libs/vectordatabase_handler.py +10 -1
- mindsdb/integrations/utilities/files/file_reader.py +5 -19
- mindsdb/integrations/utilities/handler_utils.py +32 -12
- mindsdb/integrations/utilities/rag/rerankers/base_reranker.py +1 -1
- mindsdb/interfaces/agents/agents_controller.py +246 -149
- mindsdb/interfaces/agents/constants.py +0 -1
- mindsdb/interfaces/agents/langchain_agent.py +11 -6
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +4 -4
- mindsdb/interfaces/database/database.py +38 -13
- mindsdb/interfaces/database/integrations.py +20 -5
- mindsdb/interfaces/database/projects.py +174 -23
- mindsdb/interfaces/database/views.py +86 -60
- mindsdb/interfaces/jobs/jobs_controller.py +103 -110
- mindsdb/interfaces/knowledge_base/controller.py +33 -6
- mindsdb/interfaces/knowledge_base/evaluate.py +2 -1
- mindsdb/interfaces/knowledge_base/executor.py +24 -0
- mindsdb/interfaces/knowledge_base/preprocessing/document_preprocessor.py +6 -10
- mindsdb/interfaces/knowledge_base/preprocessing/text_splitter.py +73 -0
- mindsdb/interfaces/query_context/context_controller.py +111 -145
- mindsdb/interfaces/skills/skills_controller.py +18 -6
- mindsdb/interfaces/storage/db.py +40 -6
- mindsdb/interfaces/variables/variables_controller.py +8 -15
- mindsdb/utilities/config.py +5 -3
- mindsdb/utilities/fs.py +54 -17
- mindsdb/utilities/functions.py +72 -60
- mindsdb/utilities/log.py +38 -6
- mindsdb/utilities/ps.py +7 -7
- {mindsdb-25.7.3.0.dist-info → mindsdb-25.8.2.0.dist-info}/METADATA +282 -268
- {mindsdb-25.7.3.0.dist-info → mindsdb-25.8.2.0.dist-info}/RECORD +94 -92
- mindsdb/integrations/handlers/anyscale_endpoints_handler/__about__.py +0 -9
- mindsdb/integrations/handlers/anyscale_endpoints_handler/__init__.py +0 -20
- mindsdb/integrations/handlers/anyscale_endpoints_handler/anyscale_endpoints_handler.py +0 -290
- mindsdb/integrations/handlers/anyscale_endpoints_handler/creation_args.py +0 -14
- mindsdb/integrations/handlers/anyscale_endpoints_handler/icon.svg +0 -4
- mindsdb/integrations/handlers/anyscale_endpoints_handler/requirements.txt +0 -2
- mindsdb/integrations/handlers/anyscale_endpoints_handler/settings.py +0 -51
- mindsdb/integrations/handlers/anyscale_endpoints_handler/tests/test_anyscale_endpoints_handler.py +0 -212
- /mindsdb/integrations/handlers/{anyscale_endpoints_handler/tests/__init__.py → gong_handler/requirements.txt} +0 -0
- {mindsdb-25.7.3.0.dist-info → mindsdb-25.8.2.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.7.3.0.dist-info → mindsdb-25.8.2.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.7.3.0.dist-info → mindsdb-25.8.2.0.dist-info}/top_level.txt +0 -0
mindsdb/api/http/namespaces/tree.py
CHANGED

@@ -9,97 +9,105 @@ from mindsdb.api.http.namespaces.configs.tree import ns_conf
 from mindsdb.metrics.metrics import api_endpoint_metrics


-@ns_conf.route(
+@ns_conf.route("/")
 class GetRoot(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("get_tree_root")
+    @api_endpoint_metrics("GET", "/tree")
     def get(self):
         databases = ca.database_controller.get_list()
-        result = [
-
-
-
-
-
-
-
+        result = [
+            {
+                "name": x["name"],
+                "class": "db",
+                "type": x["type"],
+                "engine": x["engine"],
+                "deletable": x["deletable"],
+                "visible": x["visible"],
+            }
+            for x in databases
+        ]
         return result


-@ns_conf.route(
-@ns_conf.param(
+@ns_conf.route("/<db_name>")
+@ns_conf.param("db_name", "Name of the database")
 class GetLeaf(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("get_tree_leaf")
+    @api_endpoint_metrics("GET", "/tree/database")
     def get(self, db_name):
-        with_schemas = request.args.get(
+        with_schemas = request.args.get("all_schemas")
         if isinstance(with_schemas, str):
-            with_schemas = with_schemas.lower() in (
+            with_schemas = with_schemas.lower() in ("1", "true")
         else:
             with_schemas = False
         db_name = db_name.lower()
         databases = ca.database_controller.get_dict()
         if db_name not in databases:
-            return http_error(
-                400,
-                "Error",
-                f"There is no element with name '{db_name}'"
-            )
+            return http_error(400, "Error", f"There is no element with name '{db_name}'")
         db = databases[db_name]
-        if db[
+        if db["type"] == "project":
             project = ca.database_controller.get_project(db_name)
             tables = project.get_tables()
-            tables = [
-
-
-
-
-
-
-
-
+            tables = [
+                {
+                    "name": key,
+                    "schema": None,
+                    "class": "table",
+                    "type": val["type"],
+                    "engine": val.get("engine"),
+                    "deletable": val.get("deletable"),
+                }
+                for key, val in tables.items()
+            ]
+
+            jobs = ca.jobs_controller.get_list(db_name)
+            tables = tables + [
+                {"name": job["name"], "schema": None, "class": "job", "type": "job", "engine": "job", "deletable": True}
+                for job in jobs
+            ]
+        elif db["type"] == "data":
             handler = ca.integration_controller.get_data_handler(db_name)
-            if
+            if "all" in inspect.signature(handler.get_tables).parameters:
                 response = handler.get_tables(all=with_schemas)
             else:
                 response = handler.get_tables()
-            if response.type !=
+            if response.type != "table":
                 return []
-            table_types = {
-
-                'VIEW': 'view'
-            }
-            tables = response.data_frame.to_dict(orient='records')
+            table_types = {"BASE TABLE": "table", "VIEW": "view"}
+            tables = response.data_frame.to_dict(orient="records")

             schemas = defaultdict(list)

             for table_meta in tables:
                 table_meta = {key.lower(): val for key, val in table_meta.items()}
-                schama = table_meta.get(
-                schemas[schama].append(
-
-
-
-
-
-
+                schama = table_meta.get("table_schema")
+                schemas[schama].append(
+                    {
+                        "name": table_meta["table_name"],
+                        "class": "table",
+                        "type": table_types.get(table_meta.get("table_type")),
+                        "engine": None,
+                        "deletable": False,
+                    }
+                )
             if len(schemas) == 1 and list(schemas.keys())[0] is None:
                 tables = schemas[None]
             else:
-                tables = [
-
-
-
-
-                } for key, val in schemas.items()]
-        elif db['type'] == 'system':
+                tables = [
+                    {"name": key, "class": "schema", "deletable": False, "children": val}
+                    for key, val in schemas.items()
+                ]
+        elif db["type"] == "system":
             system_db = ca.database_controller.get_system_db(db_name)
             tables = system_db.get_tree_tables()
-            tables = [
-
-
-
-
-
-
+            tables = [
+                {
+                    "name": table.name,
+                    "class": table.kind,
+                    "type": "system view",
+                    "engine": None,
+                    "deletable": table.deletable,
+                }
+                for table in tables.values()
+            ]
         return tables

mindsdb/api/http/namespaces/views.py
CHANGED

@@ -10,143 +10,127 @@ from mindsdb.metrics.metrics import api_endpoint_metrics
 from mindsdb.utilities.exception import EntityNotExistsError


-@ns_conf.route(
+@ns_conf.route("/<project_name>/views")
 class ViewsList(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("list_views")
+    @api_endpoint_metrics("GET", "/views")
     def get(self, project_name):
-
+        """List all views"""
         session = SessionController()
         try:
             project = session.database_controller.get_project(project_name)
         except EntityNotExistsError:
-            return http_error(
-                HTTPStatus.NOT_FOUND,
-                'Project not found',
-                f'Project name {project_name} does not exist'
-            )
+            return http_error(HTTPStatus.NOT_FOUND, "Project not found", f"Project name {project_name} does not exist")

         all_views = project.get_views()
         all_view_objs = []
         # Only want to return relevant fields to the user.
         for view in all_views:
-            all_view_objs.append({
-                'id': view['metadata']['id'],
-                'name': view['name'],
-                'query': view['query']
-            })
+            all_view_objs.append({"id": view["metadata"]["id"], "name": view["name"], "query": view["query"]})
         return all_view_objs

-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("create_view")
+    @api_endpoint_metrics("POST", "/views")
     def post(self, project_name):
-
-        if
-            return http_error(HTTPStatus.BAD_REQUEST,
+        """Create a new view"""
+        if "view" not in request.json:
+            return http_error(HTTPStatus.BAD_REQUEST, "Wrong argument", 'Must provide "view" parameter in POST body')
         session = SessionController()
-        view_obj = request.json[
-        if
-            return http_error(HTTPStatus.BAD_REQUEST,
-        if
-            return http_error(HTTPStatus.BAD_REQUEST,
-        name = view_obj[
-        query = view_obj[
+        view_obj = request.json["view"]
+        if "name" not in view_obj:
+            return http_error(HTTPStatus.BAD_REQUEST, "Wrong argument", 'Missing "name" field for view')
+        if "query" not in view_obj:
+            return http_error(HTTPStatus.BAD_REQUEST, "Wrong argument", 'Missing "query" field for view')
+        name = view_obj["name"]
+        query = view_obj["query"]

         try:
             project = session.database_controller.get_project(project_name)
         except EntityNotExistsError:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "Not found", f"Project name {project_name} does not exist")

         if project.get_view(name) is not None:
-            return http_error(HTTPStatus.CONFLICT,
+            return http_error(HTTPStatus.CONFLICT, "Name conflict", f"View with name {name} already exists.")

-        project.create_view(name, query)
+        project.create_view(name, query, session)
         created_view = project.get_view(name)
         # Only want to return relevant fields to the user.
         return {
-
-
-
+            "id": created_view["metadata"]["id"],
+            "name": created_view["name"],
+            "query": created_view["query"],
         }, HTTPStatus.CREATED


-@ns_conf.route(
-@ns_conf.param(
-@ns_conf.param(
+@ns_conf.route("/<project_name>/views/<view_name>")
+@ns_conf.param("project_name", "Name of the project")
+@ns_conf.param("view_name", "Name of the view")
 class ViewResource(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("get_view")
+    @api_endpoint_metrics("GET", "/views/view")
     def get(self, project_name, view_name):
-
+        """Get a view by name"""
         session = SessionController()
         try:
             project = session.database_controller.get_project(project_name)
         except EntityNotExistsError:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "Project not found", f"Project name {project_name} does not exist")

         view = project.get_view(view_name)
         if view is None:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "View not found", f"View with name {view_name} does not exist")

         # Only want to return relevant fields to the user.
-        return {
-            'id': view['metadata']['id'],
-            'name': view['name'],
-            'query': view['query']
-        }
+        return {"id": view["metadata"]["id"], "name": view["name"], "query": view["query"]}

-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("update_view")
+    @api_endpoint_metrics("PUT", "/views/view")
     def put(self, project_name, view_name):
-
-        if
-            return http_error(HTTPStatus.BAD_REQUEST,
-        request_view = request.json[
+        """Updates or creates a view"""
+        if "view" not in request.json:
+            return http_error(HTTPStatus.BAD_REQUEST, "Wrong argument", 'Must provide "view" parameter in PUT body')
+        request_view = request.json["view"]
         session = SessionController()
         try:
             project = session.database_controller.get_project(project_name)
         except EntityNotExistsError:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "Project not found", f"Project name {project_name} does not exist")

         existing_view = project.get_view(view_name)
         if existing_view is None:
             # Create
-            if
-                return http_error(HTTPStatus.BAD_REQUEST,
-            project.create_view(view_name, request_view[
+            if "query" not in request_view:
+                return http_error(HTTPStatus.BAD_REQUEST, "Wrong argument", 'Missing "query" field for new view')
+            project.create_view(view_name, request_view["query"], session)
             created_view = project.get_view(view_name)
             # Only want to return relevant fields to the user.
             return {
-
-
-
+                "id": created_view["metadata"]["id"],
+                "name": created_view["name"],
+                "query": created_view["query"],
             }, HTTPStatus.CREATED

-        new_query = existing_view[
-        if
-            new_query = request_view[
+        new_query = existing_view["query"]
+        if "query" in request_view:
+            new_query = request_view["query"]
         project.update_view(view_name, new_query)

         existing_view = project.get_view(view_name)
         # Only want to return relevant fields to the user.
-        return {
-            'id': existing_view['metadata']['id'],
-            'name': existing_view['name'],
-            'query': existing_view['query']
-        }
+        return {"id": existing_view["metadata"]["id"], "name": existing_view["name"], "query": existing_view["query"]}

-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("delete_view")
+    @api_endpoint_metrics("DELETE", "/views/view")
     def delete(self, project_name, view_name):
-
+        """Deletes a view by name"""
         session = SessionController()
         try:
             project = session.database_controller.get_project(project_name)
         except EntityNotExistsError:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "Project not found", f"Project name {project_name} does not exist")

         if project.get_view(view_name) is None:
-            return http_error(HTTPStatus.NOT_FOUND,
+            return http_error(HTTPStatus.NOT_FOUND, "View not found", f"View with name {view_name} does not exist")

         project.delete_view(view_name)
-        return
+        return "", HTTPStatus.NO_CONTENT

mindsdb/api/mcp/start.py
CHANGED

@@ -15,6 +15,7 @@ from starlette.responses import Response
 from mindsdb.api.mysql.mysql_proxy.classes.fake_mysql_proxy import FakeMysqlProxy
 from mindsdb.api.executor.data_types.response_type import RESPONSE_TYPE as SQL_RESPONSE_TYPE
 from mindsdb.utilities import log
+from mindsdb.utilities.log import get_uvicorn_logging_config
 from mindsdb.utilities.config import Config
 from mindsdb.interfaces.storage import db

@@ -173,6 +174,7 @@ async def run_sse_async() -> None:
         host=mcp.settings.host,
         port=mcp.settings.port,
         log_level=mcp.settings.log_level.lower(),
+        log_config=get_uvicorn_logging_config("uvicorn_mcp"),
     )
     server = uvicorn.Server(config)
     await server.serve()

mindsdb/api/mysql/mysql_proxy/utilities/dump.py
CHANGED

@@ -118,8 +118,9 @@ def _dump_str(var: Any) -> str | None:
             return json_encoder.encode(var)
         except Exception:
             return str(var)
-
-
+    # pd.isna returns array of bools for list
+    # and the truth value of a numpy array is ambiguous
+    if isinstance(var, (list, np.ndarray)) is False and pd.isna(var):
         return None
     return str(var)


mindsdb/integrations/handlers/autogluon_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 autogluon
-type_infer==0.0.
+type_infer==0.0.23

mindsdb/integrations/handlers/autosklearn_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 auto-sklearn
-type_infer==0.0.
+type_infer==0.0.23

mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py
CHANGED

@@ -1,8 +1,10 @@
-
-from
+import json
+from typing import Any, Dict, Optional, Text
+
+from google.cloud.bigquery import Client, QueryJobConfig, DEFAULT_RETRY
+from google.api_core.exceptions import BadRequest, NotFound
 import pandas as pd
 from sqlalchemy_bigquery.base import BigQueryDialect
-from typing import Any, Dict, Optional, Text

 from mindsdb.utilities import log
 from mindsdb_sql_parser.ast.base import ASTNode
@@ -54,9 +56,22 @@ class BigQueryHandler(MetaDatabaseHandler):
         if not all(key in self.connection_data for key in ["project_id", "dataset"]):
             raise ValueError("Required parameters (project_id, dataset) must be provided.")

+        service_account_json = self.connection_data.get("service_account_json")
+        if isinstance(service_account_json, str):
+            # GUI send it as str
+            try:
+                service_account_json = json.loads(service_account_json)
+            except json.decoder.JSONDecodeError:
+                raise ValueError("'service_account_json' is not valid JSON")
+        if isinstance(service_account_json, dict) and isinstance(service_account_json.get("private_key"), str):
+            # some editors may escape new line symbol, also replace windows-like newlines
+            service_account_json["private_key"] = (
+                service_account_json["private_key"].replace("\\n", "\n").replace("\r\n", "\n")
+            )
+
         google_sa_oauth2_manager = GoogleServiceAccountOAuth2Manager(
             credentials_file=self.connection_data.get("service_account_keys"),
-            credentials_json=
+            credentials_json=service_account_json,
         )
         credentials = google_sa_oauth2_manager.get_oauth2_credentials()

@@ -85,7 +100,7 @@ class BigQueryHandler(MetaDatabaseHandler):

         try:
             connection = self.connect()
-            connection.query("SELECT 1;")
+            connection.query("SELECT 1;", timeout=10, retry=DEFAULT_RETRY.with_deadline(10))

             # Check if the dataset exists
             connection.get_dataset(self.connection_data["dataset"])
@@ -94,6 +109,11 @@ class BigQueryHandler(MetaDatabaseHandler):
         except (BadRequest, ValueError) as e:
             logger.error(f"Error connecting to BigQuery {self.connection_data['project_id']}, {e}!")
             response.error_message = e
+        except NotFound:
+            response.error_message = (
+                f"Error connecting to BigQuery {self.connection_data['project_id']}: "
+                f"dataset '{self.connection_data['dataset']}' not found"
+            )

         if response.success is False and self.is_connected is True:
             self.is_connected = False

mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py
CHANGED

@@ -59,6 +59,7 @@ class ChromaDBHandler(VectorStoreHandler):
         self._client = None
         self.persist_directory = None
         self.is_connected = False
+        self._use_handler_storage = False

         config = self.validate_connection_parameters(name, **kwargs)

@@ -72,8 +73,6 @@ class ChromaDBHandler(VectorStoreHandler):
             "hnsw:space": config.distance,
         }

-        self._use_handler_storage = False
-
         self.connect()

     def validate_connection_parameters(self, name, **kwargs):
@@ -395,7 +394,7 @@ class ChromaDBHandler(VectorStoreHandler):

         return df

-    def insert(self, collection_name: str, df: pd.DataFrame):
+    def insert(self, collection_name: str, df: pd.DataFrame) -> Response:
         """
         Insert/Upsert data into ChromaDB collection.
         If records with same IDs exist, they will be updated.
@@ -433,6 +432,7 @@ class ChromaDBHandler(VectorStoreHandler):
         except Exception as e:
             logger.error(f"Error during upsert operation: {str(e)}")
             raise Exception(f"Failed to insert/update data: {str(e)}")
+        return Response(RESPONSE_TYPE.OK, affected_rows=len(df))

     def upsert(self, table_name: str, data: pd.DataFrame):
         """

mindsdb/integrations/handlers/db2_handler/db2_handler.py
CHANGED

@@ -59,16 +59,20 @@ class DB2Handler(DatabaseHandler):
             return self.connection

         # Mandatory connection parameters.
-        if not all(key in self.connection_data for key in [
-            raise ValueError(
-
-
-
-
-
-        connection_string
-
-
+        if not all(key in self.connection_data for key in ["host", "user", "password", "database"]):
+            raise ValueError("Required parameters (host, user, password, database) must be provided.")
+        cloud = "databases.appdomain.cloud" in self.connection_data["host"]
+        if cloud:
+            connection_string = f"DATABASE={self.connection_data['database']};HOSTNAME={self.connection_data['host']};PORT={self.connection_data['port']};PROTOCOL=TCPIP;UID={self.connection_data['user']};PWD={self.connection_data['password']};SECURITY=SSL;"
+            connection_string += "SSLSERVERCERTIFICATE=;"
+        else:
+            connection_string = f"DRIVER={'IBM DB2 ODBC DRIVER'};DATABASE={self.connection_data['database']};HOST={self.connection_data['host']};PROTOCOL=TCPIP;UID={self.connection_data['user']};PWD={self.connection_data['password']};"
+
+        # Optional connection parameters.
+        if "port" in self.connection_data:
+            connection_string += f"PORT={self.connection_data['port']};"
+
+        if "schema" in self.connection_data:
             connection_string += f"CURRENTSCHEMA={self.connection_data['schema']};"

         try:
@@ -106,10 +110,10 @@ class DB2Handler(DatabaseHandler):
             self.connect()
             response.success = True
         except (OperationalError, ValueError) as known_error:
-            logger.error(f
+            logger.error(f"Connection check to IBM Db2 failed, {known_error}!")
             response.error_message = str(known_error)
         except Exception as unknown_error:
-            logger.error(f
+            logger.error(f"Connection check to IBM Db2 failed due to an unknown error, {unknown_error}!")
             response.error_message = str(unknown_error)

         if response.success and need_to_close:
@@ -141,9 +145,7 @@ class DB2Handler(DatabaseHandler):
                 result = cur.fetchall()
                 response = Response(
                     RESPONSE_TYPE.TABLE,
-                    data_frame=pd.DataFrame(
-                        result, columns=[x[0] for x in cur.description]
-                    ),
+                    data_frame=pd.DataFrame(result, columns=[x[0] for x in cur.description]),
                 )
             else:
                 response = Response(RESPONSE_TYPE.OK)
@@ -198,10 +200,7 @@ class DB2Handler(DatabaseHandler):
                 }
             )

-        response = Response(
-            RESPONSE_TYPE.TABLE,
-            data_frame=pd.DataFrame(tables)
-        )
+        response = Response(RESPONSE_TYPE.TABLE, data_frame=pd.DataFrame(tables))

         return response

@@ -227,9 +226,6 @@ class DB2Handler(DatabaseHandler):

         columns = [column["COLUMN_NAME"] for column in result]

-        response = Response(
-            RESPONSE_TYPE.TABLE,
-            data_frame=pd.DataFrame(columns, columns=["COLUMN_NAME"])
-        )
+        response = Response(RESPONSE_TYPE.TABLE, data_frame=pd.DataFrame(columns, columns=["COLUMN_NAME"]))

         return response

mindsdb/integrations/handlers/flaml_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 flaml<=1.2.3
-type_infer==0.0.
+type_infer==0.0.23

mindsdb/integrations/handlers/gong_handler/__init__.py
ADDED

@@ -0,0 +1,30 @@
+from mindsdb.integrations.libs.const import HANDLER_TYPE
+
+from .__about__ import __version__ as version, __description__ as description
+from .connection_args import connection_args, connection_args_example
+
+try:
+    from .gong_handler import GongHandler as Handler
+
+    import_error = None
+except Exception as e:
+    Handler = None
+    import_error = e
+
+title = "Gong"
+name = "gong"
+type = HANDLER_TYPE.DATA
+icon_path = "icon.svg"
+
+__all__ = [
+    "Handler",
+    "version",
+    "name",
+    "type",
+    "title",
+    "description",
+    "import_error",
+    "icon_path",
+    "connection_args_example",
+    "connection_args",
+]