MindsDB 25.5.4.1__py3-none-any.whl → 25.6.2.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
This release has been flagged as potentially problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/api/a2a/agent.py +28 -25
- mindsdb/api/a2a/common/server/server.py +32 -26
- mindsdb/api/a2a/run_a2a.py +1 -1
- mindsdb/api/executor/command_executor.py +69 -14
- mindsdb/api/executor/datahub/datanodes/integration_datanode.py +49 -65
- mindsdb/api/executor/datahub/datanodes/project_datanode.py +29 -48
- mindsdb/api/executor/datahub/datanodes/system_tables.py +35 -61
- mindsdb/api/executor/planner/plan_join.py +67 -77
- mindsdb/api/executor/planner/query_planner.py +176 -155
- mindsdb/api/executor/planner/steps.py +37 -12
- mindsdb/api/executor/sql_query/result_set.py +45 -64
- mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +14 -18
- mindsdb/api/executor/sql_query/steps/fetch_dataframe_partition.py +17 -18
- mindsdb/api/executor/sql_query/steps/insert_step.py +13 -33
- mindsdb/api/executor/sql_query/steps/subselect_step.py +43 -35
- mindsdb/api/executor/utilities/sql.py +42 -48
- mindsdb/api/http/namespaces/config.py +1 -1
- mindsdb/api/http/namespaces/file.py +14 -23
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_datum.py +12 -28
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/binary_resultset_row_package.py +59 -50
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/resultset_row_package.py +9 -8
- mindsdb/api/mysql/mysql_proxy/libs/constants/mysql.py +449 -461
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +87 -36
- mindsdb/integrations/handlers/file_handler/file_handler.py +15 -9
- mindsdb/integrations/handlers/file_handler/tests/test_file_handler.py +43 -24
- mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +10 -3
- mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +26 -33
- mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +74 -51
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +305 -98
- mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +53 -34
- mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +136 -6
- mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +334 -83
- mindsdb/integrations/libs/api_handler.py +261 -57
- mindsdb/integrations/libs/base.py +100 -29
- mindsdb/integrations/utilities/files/file_reader.py +99 -73
- mindsdb/integrations/utilities/handler_utils.py +23 -8
- mindsdb/integrations/utilities/sql_utils.py +35 -40
- mindsdb/interfaces/agents/agents_controller.py +196 -192
- mindsdb/interfaces/agents/constants.py +7 -1
- mindsdb/interfaces/agents/langchain_agent.py +42 -11
- mindsdb/interfaces/agents/mcp_client_agent.py +29 -21
- mindsdb/interfaces/data_catalog/__init__.py +0 -0
- mindsdb/interfaces/data_catalog/base_data_catalog.py +54 -0
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +359 -0
- mindsdb/interfaces/data_catalog/data_catalog_reader.py +34 -0
- mindsdb/interfaces/database/database.py +81 -57
- mindsdb/interfaces/database/integrations.py +220 -234
- mindsdb/interfaces/database/log.py +72 -104
- mindsdb/interfaces/database/projects.py +156 -193
- mindsdb/interfaces/file/file_controller.py +21 -65
- mindsdb/interfaces/knowledge_base/controller.py +63 -10
- mindsdb/interfaces/knowledge_base/evaluate.py +519 -0
- mindsdb/interfaces/knowledge_base/llm_client.py +75 -0
- mindsdb/interfaces/skills/custom/text2sql/mindsdb_kb_tools.py +83 -43
- mindsdb/interfaces/skills/skills_controller.py +54 -36
- mindsdb/interfaces/skills/sql_agent.py +109 -86
- mindsdb/interfaces/storage/db.py +223 -79
- mindsdb/migrations/versions/2025-05-28_a44643042fe8_added_data_catalog_tables.py +118 -0
- mindsdb/migrations/versions/2025-06-09_608e376c19a7_updated_data_catalog_data_types.py +58 -0
- mindsdb/utilities/config.py +9 -2
- mindsdb/utilities/log.py +35 -26
- mindsdb/utilities/ml_task_queue/task.py +19 -22
- mindsdb/utilities/render/sqlalchemy_render.py +129 -181
- mindsdb/utilities/starters.py +49 -1
- {mindsdb-25.5.4.1.dist-info → mindsdb-25.6.2.0.dist-info}/METADATA +268 -268
- {mindsdb-25.5.4.1.dist-info → mindsdb-25.6.2.0.dist-info}/RECORD +70 -62
- {mindsdb-25.5.4.1.dist-info → mindsdb-25.6.2.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.5.4.1.dist-info → mindsdb-25.6.2.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.5.4.1.dist-info → mindsdb-25.6.2.0.dist-info}/top_level.txt +0 -0
mindsdb/interfaces/database/projects.py

@@ -11,9 +11,10 @@ from mindsdb_sql_parser.ast import Select, Star, Constant, Identifier
 from mindsdb_sql_parser import parse_sql
 
 from mindsdb.interfaces.storage import db
-from mindsdb.utilities.config import Config
 from mindsdb.interfaces.model.model_controller import ModelController
 from mindsdb.interfaces.database.views import ViewController
+from mindsdb.utilities import log
+from mindsdb.utilities.config import Config
 from mindsdb.utilities.context import context as ctx
 from mindsdb.utilities.exception import EntityExistsError, EntityNotExistsError
 import mindsdb.utilities.profiler as profiler
@@ -21,6 +22,8 @@ from mindsdb.api.executor.sql_query import SQLQuery
 from mindsdb.api.executor.utilities.sql import query_df
 from mindsdb.interfaces.query_context.context_controller import query_context_controller
 
+logger = log.getLogger(__name__)
+
 
 class Project:
     @staticmethod
@@ -39,11 +42,10 @@ class Project:
         company_id = ctx.company_id if ctx.company_id is not None else 0
 
         existing_record = db.Integration.query.filter(
-            sa.func.lower(db.Integration.name) == name,
-            db.Integration.company_id == ctx.company_id
+            sa.func.lower(db.Integration.name) == name, db.Integration.company_id == ctx.company_id
         ).first()
         if existing_record is not None:
-            raise EntityExistsError(
+            raise EntityExistsError("Database exists with this name ", name)
 
         existing_record = db.Project.query.filter(
             (sa.func.lower(db.Project.name) == name)
@@ -51,12 +53,9 @@ class Project:
             & (db.Project.deleted_at == sa.null())
         ).first()
         if existing_record is not None:
-            raise EntityExistsError(
+            raise EntityExistsError("Project already exists", name)
 
-        record = db.Project(
-            name=name,
-            company_id=company_id
-        )
+        record = db.Project(name=name, company_id=company_id)
 
         self.record = record
         self.name = name
@@ -68,18 +67,20 @@ class Project:
         self.id = record.id
 
     def delete(self):
-        if self.record.metadata_ and self.record.metadata_.get(
+        if self.record.metadata_ and self.record.metadata_.get("is_default", False):
             raise Exception(
                 f"Project '{self.name}' can not be deleted, because it is default project."
                 "The default project can be changed in the config file or by setting the environment variable MINDSDB_DEFAULT_PROJECT."
             )
 
         tables = self.get_tables()
-        tables = [key for key, val in tables.items() if val[
+        tables = [key for key, val in tables.items() if val["type"] != "table"]
         if len(tables) > 0:
-            raise Exception(
+            raise Exception(
+                f"Project '{self.name}' can not be deleted, because it contains tables: {', '.join(tables)}"
+            )
 
-        is_cloud = Config().get(
+        is_cloud = Config().get("cloud", False)
         if is_cloud is True:
             self.record.deleted_at = datetime.datetime.now()
         else:
@@ -91,60 +92,36 @@ class Project:
         db.session.commit()
 
     def drop_model(self, name: str):
-        ModelController().delete_model(
-            name,
-            project_name=self.name
-        )
+        ModelController().delete_model(name, project_name=self.name)
 
     def drop_view(self, name: str):
-        ViewController().delete(
-            name,
-            project_name=self.name
-        )
+        ViewController().delete(name, project_name=self.name)
 
     def create_view(self, name: str, query: str):
-        ViewController().add(
-            name,
-            query=query,
-            project_name=self.name
-        )
+        ViewController().add(name, query=query, project_name=self.name)
 
     def update_view(self, name: str, query: str):
-        ViewController().update(
-            name,
-            query=query,
-            project_name=self.name
-        )
+        ViewController().update(name, query=query, project_name=self.name)
 
     def delete_view(self, name: str):
-        ViewController().delete(
-            name,
-            project_name=self.name
-        )
+        ViewController().delete(name, project_name=self.name)
 
     def get_view_meta(self, query: ASTNode) -> ASTNode:
         view_name = query.from_table.parts[-1]
-        view_meta = ViewController().get(
-
-            project_name=self.name
-        )
-        view_meta['query_ast'] = parse_sql(view_meta['query'])
+        view_meta = ViewController().get(name=view_name, project_name=self.name)
+        view_meta["query_ast"] = parse_sql(view_meta["query"])
         return view_meta
 
     def query_view(self, query, session):
-
         view_meta = self.get_view_meta(query)
 
-        query_context_controller.set_context(
+        query_context_controller.set_context("view", view_meta["id"])
 
         try:
-            sqlquery = SQLQuery(
-                view_meta['query_ast'],
-                session=session
-            )
+            sqlquery = SQLQuery(view_meta["query_ast"], session=session)
             df = sqlquery.fetched_data.to_df()
         finally:
-            query_context_controller.release_context(
+            query_context_controller.release_context("view", view_meta["id"])
 
         # remove duplicated columns
         df = df.loc[:, ~df.columns.duplicated()]
@@ -160,73 +137,62 @@ class Project:
         if (
             predictor_record.training_start_at is not None
             and predictor_record.training_stop_at is None
-            and predictor_record.status !=
+            and predictor_record.status != "error"
         ):
             training_time = round((datetime.datetime.now() - predictor_record.training_start_at).total_seconds(), 3)
-        elif
-
-
-
-            training_time = round((predictor_record.training_stop_at - predictor_record.training_start_at).total_seconds(), 3)
+        elif predictor_record.training_start_at is not None and predictor_record.training_stop_at is not None:
+            training_time = round(
+                (predictor_record.training_stop_at - predictor_record.training_start_at).total_seconds(), 3
+            )
 
         # regon Hide sensitive info
         training_options = predictor_record.learn_args
         handler_module = integration_controller.get_handler_module(integraion_record.engine)
 
         if with_secrets is False and handler_module:
-
-            model_using_args = getattr(handler_module, 'model_using_args', None)
+            model_using_args = getattr(handler_module, "model_using_args", None)
             if (
                 isinstance(model_using_args, dict)
                 and isinstance(training_options, dict)
-                and isinstance(training_options.get(
+                and isinstance(training_options.get("using"), dict)
             ):
-                training_options[
+                training_options["using"] = deepcopy(training_options["using"])
                 for key, value in model_using_args.items():
-                    if key in training_options[
-                        training_options[
+                    if key in training_options["using"] and value.get("secret", False):
+                        training_options["using"][key] = "******"
         # endregion
 
         predictor_meta = {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }
-        if predictor_data.get('accuracies', None) is not None:
-            if len(predictor_data['accuracies']) > 0:
-                predictor_meta['accuracy'] = float(np.mean(list(predictor_data['accuracies'].values())))
-        return {
-            'name': predictor_record.name,
-            'metadata': predictor_meta,
-            'created_at': predictor_record.created_at
+            "type": "model",
+            "id": predictor_record.id,
+            "engine": integraion_record.engine,
+            "engine_name": integraion_record.name,
+            "active": predictor_record.active,
+            "version": predictor_record.version,
+            "status": predictor_record.status,
+            "accuracy": None,
+            "predict": predictor_record.to_predict[0],
+            "update_status": predictor_record.update_status,
+            "mindsdb_version": predictor_record.mindsdb_version,
+            "error": predictor_data.get("error"),
+            "select_data_query": predictor_record.fetch_data_query,
+            "training_options": training_options,
+            "deletable": True,
+            "label": predictor_record.label,
+            "current_training_phase": predictor_record.training_phase_current,
+            "total_training_phases": predictor_record.training_phase_total,
+            "training_phase_name": predictor_record.training_phase_name,
+            "training_time": training_time,
         }
+        if predictor_data.get("accuracies", None) is not None:
+            if len(predictor_data["accuracies"]) > 0:
+                predictor_meta["accuracy"] = float(np.mean(list(predictor_data["accuracies"].values())))
+        return {"name": predictor_record.name, "metadata": predictor_meta, "created_at": predictor_record.created_at}
 
     def get_model(self, name: str):
         record = (
-            db.session.query(db.Predictor, db.Integration)
-
-                active=True,
-                name=name,
-                deleted_at=sa.null(),
-                company_id=ctx.company_id
-            )
+            db.session.query(db.Predictor, db.Integration)
+            .filter_by(project_id=self.id, active=True, name=name, deleted_at=sa.null(), company_id=ctx.company_id)
            .join(db.Integration, db.Integration.id == db.Predictor.integration_id)
            .order_by(db.Predictor.name, db.Predictor.id)
            .first()
@@ -237,12 +203,8 @@ class Project:
 
     def get_model_by_id(self, model_id: int):
         record = (
-            db.session.query(db.Predictor, db.Integration)
-
-                id=model_id,
-                deleted_at=sa.null(),
-                company_id=ctx.company_id
-            )
+            db.session.query(db.Predictor, db.Integration)
+            .filter_by(project_id=self.id, id=model_id, deleted_at=sa.null(), company_id=ctx.company_id)
            .join(db.Integration, db.Integration.id == db.Predictor.integration_id)
            .order_by(db.Predictor.name, db.Predictor.id)
            .first()
@@ -253,45 +215,38 @@ class Project:
 
     def get_models(self, active: bool = True, with_secrets: bool = True):
         query = db.session.query(db.Predictor, db.Integration).filter_by(
-            project_id=self.id,
-            deleted_at=sa.null(),
-            company_id=ctx.company_id
+            project_id=self.id, deleted_at=sa.null(), company_id=ctx.company_id
         )
         if isinstance(active, bool):
             query = query.filter_by(active=active)
 
-        query = query.join(
-            db.
-        )
+        query = query.join(db.Integration, db.Integration.id == db.Predictor.integration_id).order_by(
+            db.Predictor.name, db.Predictor.id
+        )
 
         data = []
 
         for predictor_record, integraion_record in query.all():
-            data.append(
-                self._get_model_data(predictor_record, integraion_record, with_secrets)
-            )
+            data.append(self._get_model_data(predictor_record, integraion_record, with_secrets))
 
         return data
 
     def get_agents(self):
         records = (
-            db.session.query(db.Agents)
+            db.session.query(db.Agents)
+            .filter(
                 db.Agents.project_id == self.id,
                 db.Agents.company_id == ctx.company_id,
-                db.Agents.deleted_at == sa.null()
+                db.Agents.deleted_at == sa.null(),
             )
            .order_by(db.Agents.name)
            .all()
         )
         data = [
             {
-
-
-
-                    'type': 'agent',
-                    'id': record.id,
-                    'deletable': True
-                }
+                "name": record.name,
+                "query": record.query,
+                "metadata": {"type": "agent", "id": record.id, "deletable": True},
             }
             for record in records
         ]
@@ -299,113 +254,122 @@ class Project:
 
     def get_knowledge_bases(self):
         from mindsdb.api.executor.controllers.session_controller import SessionController
+
         session = SessionController()
 
         return {
-            kb[
-                'type': 'knowledge_base',
-                'id': kb['id'],
-                'deletable': True
-            }
+            kb["name"]: {"type": "knowledge_base", "id": kb["id"], "deletable": True}
             for kb in session.kb_controller.list(self.name)
         }
 
     def get_views(self):
         records = (
-            db.session.query(db.View)
-
-                company_id=ctx.company_id
-            )
+            db.session.query(db.View)
+            .filter_by(project_id=self.id, company_id=ctx.company_id)
            .order_by(db.View.name, db.View.id)
            .all()
         )
-        data = [
-
-
-
-
-
-                'deletable': True
-            }}
+        data = [
+            {
+                "name": view_record.name,
+                "query": view_record.query,
+                "metadata": {"type": "view", "id": view_record.id, "deletable": True},
+            }
             for view_record in records
         ]
         return data
 
     def get_view(self, name):
-        view_record =
-            db.
-
-
-
+        view_record = (
+            db.session.query(db.View)
+            .filter(
+                db.View.project_id == self.id,
+                db.View.company_id == ctx.company_id,
+                sa.func.lower(db.View.name) == name.lower(),
+            )
+            .one_or_none()
+        )
         if view_record is None:
             return view_record
         return {
-
-
-
-                'type': 'view',
-                'id': view_record.id,
-                'deletable': True
-            }
+            "name": view_record.name,
+            "query": view_record.query,
+            "metadata": {"type": "view", "id": view_record.id, "deletable": True},
         }
 
     @profiler.profile()
     def get_tables(self):
         data = OrderedDict()
-        data[
+        data["models"] = {"type": "table", "deletable": False}
 
         models = self.get_models()
         for model in models:
-            if model[
-                data[model[
+            if model["metadata"]["active"] is True:
+                data[model["name"]] = model["metadata"]
 
         views = self.get_views()
         for view in views:
-            data[view[
+            data[view["name"]] = view["metadata"]
 
         agents = self.get_agents()
         for agent in agents:
-            data[agent[
+            data[agent["name"]] = agent["metadata"]
 
         data.update(self.get_knowledge_bases())
 
         return data
 
-    def get_columns(self, table_name: str):
-        # at the moment it works only for models
-        predictor_record = db.Predictor.query.filter_by(
-            company_id=ctx.company_id,
-            project_id=self.id,
-            name=table_name
-        ).first()
+    def get_columns(self, table_name: str) -> list[str] | None:
         columns = []
-
-
-
-
-
-
-
-                    columns = [columns]
+        tables = self.get_tables()
+        table = None
+        for key, value in tables.items():
+            if key.lower() == table_name.lower():
+                table_name = key
+                table = value
+        if table is None:
             return columns
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        match str(table["type"]).upper():
+            case "MODEL":
+                predictor_record = db.Predictor.query.filter_by(
+                    company_id=ctx.company_id, project_id=self.id, name=table_name
+                ).first()
+                columns = []
+                if predictor_record is not None:
+                    if isinstance(predictor_record.dtype_dict, dict):
+                        columns = list(predictor_record.dtype_dict.keys())
+                    elif predictor_record.to_predict is not None:
+                        # no dtype_dict, use target
+                        columns = predictor_record.to_predict
+                        if not isinstance(columns, list):
+                            columns = [columns]
+            case "VIEW":
+                query = Select(targets=[Star()], from_table=Identifier(table_name), limit=Constant(1))
+
+                from mindsdb.api.executor.controllers.session_controller import SessionController
+
+                session = SessionController()
+                session.database = self.name
+                df = self.query_view(query, session)
+                columns = df.columns
+            case "AGENT":
+                agent = db.Agents.query.filter_by(
+                    company_id=ctx.company_id, project_id=self.id, name=table_name
+                ).first()
+                if agent is not None:
+                    from mindsdb.interfaces.agents.constants import ASSISTANT_COLUMN, USER_COLUMN
+
+                    columns = [ASSISTANT_COLUMN, USER_COLUMN]
+            case "KNOWLEDGE_BASE":
+                from mindsdb.interfaces.knowledge_base.controller import KB_TO_VECTORDB_COLUMNS
+
+                columns = list(KB_TO_VECTORDB_COLUMNS.keys()) + ["metadata", "relevance", "distance"]
+            case "TABLE":
+                # like 'mindsdb.models'
+                pass
+            case _:
+                logger.warning(f"Unknown table type: {table['type']}")
 
         return columns
 
@@ -417,13 +381,14 @@ class ProjectController:
     def get_list(self) -> List[Project]:
         company_id = ctx.company_id if ctx.company_id is not None else 0
         records = db.Project.query.filter(
-            (db.Project.company_id == company_id)
-            & (db.Project.deleted_at == sa.null())
+            (db.Project.company_id == company_id) & (db.Project.deleted_at == sa.null())
         ).order_by(db.Project.name)
 
         return [Project.from_record(x) for x in records]
 
-    def get(
+    def get(
+        self, id: Optional[int] = None, name: Optional[str] = None, deleted: bool = False, is_default: bool = False
+    ) -> Project:
         if id is not None and name is not None:
             raise ValueError("Both 'id' and 'name' can't be provided at the same time")
 
@@ -433,9 +398,7 @@ class ProjectController:
         if id is not None:
             q = q.filter_by(id=id)
         elif name is not None:
-            q = q.filter(
-                (sa.func.lower(db.Project.name) == sa.func.lower(name))
-            )
+            q = q.filter((sa.func.lower(db.Project.name) == sa.func.lower(name)))
 
         if deleted is True:
             q = q.filter((db.Project.deleted_at != sa.null()))
@@ -443,12 +406,12 @@ class ProjectController:
             q = q.filter_by(deleted_at=sa.null())
 
         if is_default:
-            q = q.filter(db.Project.metadata_[
+            q = q.filter(db.Project.metadata_["is_default"].as_boolean() == is_default)
 
         record = q.first()
 
         if record is None:
-            raise EntityNotExistsError(f
+            raise EntityNotExistsError(f"Project not found: {name}")
         return Project.from_record(record)
 
     def add(self, name: str) -> Project: