MindsDB 25.5.4.2__py3-none-any.whl → 25.6.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of MindsDB might be problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/api/a2a/agent.py +28 -25
- mindsdb/api/a2a/common/server/server.py +32 -26
- mindsdb/api/executor/command_executor.py +69 -14
- mindsdb/api/executor/datahub/datanodes/integration_datanode.py +49 -65
- mindsdb/api/executor/datahub/datanodes/project_datanode.py +29 -48
- mindsdb/api/executor/datahub/datanodes/system_tables.py +35 -61
- mindsdb/api/executor/planner/plan_join.py +67 -77
- mindsdb/api/executor/planner/query_planner.py +176 -155
- mindsdb/api/executor/planner/steps.py +37 -12
- mindsdb/api/executor/sql_query/result_set.py +45 -64
- mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +14 -18
- mindsdb/api/executor/sql_query/steps/fetch_dataframe_partition.py +17 -18
- mindsdb/api/executor/sql_query/steps/insert_step.py +13 -33
- mindsdb/api/executor/sql_query/steps/subselect_step.py +43 -35
- mindsdb/api/executor/utilities/sql.py +42 -48
- mindsdb/api/http/namespaces/config.py +1 -1
- mindsdb/api/http/namespaces/file.py +14 -23
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_datum.py +12 -28
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/binary_resultset_row_package.py +59 -50
- mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/resultset_row_package.py +9 -8
- mindsdb/api/mysql/mysql_proxy/libs/constants/mysql.py +449 -461
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +87 -36
- mindsdb/integrations/handlers/file_handler/file_handler.py +15 -9
- mindsdb/integrations/handlers/file_handler/tests/test_file_handler.py +43 -24
- mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +10 -3
- mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +26 -33
- mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +74 -51
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +305 -98
- mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +53 -34
- mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +136 -6
- mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +334 -83
- mindsdb/integrations/libs/api_handler.py +261 -57
- mindsdb/integrations/libs/base.py +100 -29
- mindsdb/integrations/utilities/files/file_reader.py +99 -73
- mindsdb/integrations/utilities/handler_utils.py +23 -8
- mindsdb/integrations/utilities/sql_utils.py +35 -40
- mindsdb/interfaces/agents/agents_controller.py +196 -192
- mindsdb/interfaces/agents/constants.py +7 -1
- mindsdb/interfaces/agents/langchain_agent.py +42 -11
- mindsdb/interfaces/agents/mcp_client_agent.py +29 -21
- mindsdb/interfaces/data_catalog/__init__.py +0 -0
- mindsdb/interfaces/data_catalog/base_data_catalog.py +54 -0
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +359 -0
- mindsdb/interfaces/data_catalog/data_catalog_reader.py +34 -0
- mindsdb/interfaces/database/database.py +81 -57
- mindsdb/interfaces/database/integrations.py +220 -234
- mindsdb/interfaces/database/log.py +72 -104
- mindsdb/interfaces/database/projects.py +156 -193
- mindsdb/interfaces/file/file_controller.py +21 -65
- mindsdb/interfaces/knowledge_base/controller.py +63 -10
- mindsdb/interfaces/knowledge_base/evaluate.py +519 -0
- mindsdb/interfaces/knowledge_base/llm_client.py +75 -0
- mindsdb/interfaces/skills/custom/text2sql/mindsdb_kb_tools.py +83 -43
- mindsdb/interfaces/skills/skills_controller.py +54 -36
- mindsdb/interfaces/skills/sql_agent.py +109 -86
- mindsdb/interfaces/storage/db.py +223 -79
- mindsdb/migrations/versions/2025-05-28_a44643042fe8_added_data_catalog_tables.py +118 -0
- mindsdb/migrations/versions/2025-06-09_608e376c19a7_updated_data_catalog_data_types.py +58 -0
- mindsdb/utilities/config.py +9 -2
- mindsdb/utilities/log.py +35 -26
- mindsdb/utilities/ml_task_queue/task.py +19 -22
- mindsdb/utilities/render/sqlalchemy_render.py +129 -181
- mindsdb/utilities/starters.py +40 -0
- {mindsdb-25.5.4.2.dist-info → mindsdb-25.6.2.0.dist-info}/METADATA +253 -253
- {mindsdb-25.5.4.2.dist-info → mindsdb-25.6.2.0.dist-info}/RECORD +69 -61
- {mindsdb-25.5.4.2.dist-info → mindsdb-25.6.2.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.5.4.2.dist-info → mindsdb-25.6.2.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.5.4.2.dist-info → mindsdb-25.6.2.0.dist-info}/top_level.txt +0 -0
mindsdb/interfaces/database/integrations.py

@@ -32,19 +32,19 @@ from mindsdb.utilities import log
 from mindsdb.integrations.libs.ml_exec_base import BaseMLEngineExec
 from mindsdb.integrations.libs.base import BaseHandler
 import mindsdb.utilities.profiler as profiler
+from mindsdb.interfaces.data_catalog.data_catalog_loader import DataCatalogLoader
 
 logger = log.getLogger(__name__)
 
 
 class HandlersCache:
-    """
-    """
+    """Cache for data handlers that keep connections opened during ttl time from handler last use"""
 
     def __init__(self, ttl: int = 60):
-        """
+        """init cache
 
-
-
+        Args:
+            ttl (int): time to live (in seconds) for record in cache
         """
        self.ttl = ttl
        self.handlers = {}
@@ -56,50 +56,46 @@ class HandlersCache:
         self._stop_clean()
 
     def _start_clean(self) -> None:
-        """
-
-        if (
-            isinstance(self.cleaner_thread, threading.Thread)
-            and self.cleaner_thread.is_alive()
-        ):
+        """start worker that close connections after ttl expired"""
+        if isinstance(self.cleaner_thread, threading.Thread) and self.cleaner_thread.is_alive():
             return
         self._stop_event.clear()
-        self.cleaner_thread = threading.Thread(target=self._clean, name=
+        self.cleaner_thread = threading.Thread(target=self._clean, name="HandlersCache.clean")
         self.cleaner_thread.daemon = True
         self.cleaner_thread.start()
 
     def _stop_clean(self) -> None:
-        """
-        """
+        """stop clean worker"""
         self._stop_event.set()
 
     def set(self, handler: DatabaseHandler):
-        """
+        """add (or replace) handler in cache
 
-
-
+        Args:
+            handler (DatabaseHandler)
         """
         with self._lock:
             try:
                 # If the handler is defined to be thread safe, set 0 as the last element of the key, otherwise set the thrad ID.
-                key = (
+                key = (
+                    handler.name,
+                    ctx.company_id,
+                    0 if getattr(handler, "thread_safe", False) else threading.get_native_id(),
+                )
                 handler.connect()
-                self.handlers[key] = {
-                    'handler': handler,
-                    'expired_at': time.time() + self.ttl
-                }
+                self.handlers[key] = {"handler": handler, "expired_at": time.time() + self.ttl}
             except Exception:
                 pass
         self._start_clean()
 
     def get(self, name: str) -> Optional[DatabaseHandler]:
-        """
+        """get handler from cache by name
 
-
-
+        Args:
+            name (str): handler name
 
-
-
+        Returns:
+            DatabaseHandler
         """
         with self._lock:
             # If the handler is not thread safe, the thread ID will be assigned to the last element of the key.
@@ -107,19 +103,16 @@ class HandlersCache:
             if key not in self.handlers:
                 # If the handler is thread safe, a 0 will be assigned to the last element of the key.
                 key = (name, ctx.company_id, 0)
-            if (
-                key not in self.handlers
-                or self.handlers[key]['expired_at'] < time.time()
-            ):
+            if key not in self.handlers or self.handlers[key]["expired_at"] < time.time():
                 return None
-            self.handlers[key][
-            return self.handlers[key][
+            self.handlers[key]["expired_at"] = time.time() + self.ttl
+            return self.handlers[key]["handler"]
 
     def delete(self, name: str) -> None:
-        """
+        """delete handler from cache
 
-
-
+        Args:
+            name (str): handler name
         """
         with self._lock:
             key = (name, ctx.company_id, threading.get_native_id())
@@ -133,14 +126,13 @@ class HandlersCache:
             self._stop_clean()
 
     def _clean(self) -> None:
-        """
-        """
+        """worker that delete from cache handlers that was not in use for ttl"""
         while self._stop_event.wait(timeout=3) is False:
             with self._lock:
                 for key in list(self.handlers.keys()):
                     if (
-                        self.handlers[key][
-                        and sys.getrefcount(self.handlers[key]) == 2
+                        self.handlers[key]["expired_at"] < time.time()
+                        and sys.getrefcount(self.handlers[key]) == 2  # returned ref count is always 1 higher
                     ):
                         try:
                             self.handlers[key].disconnect()
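Note: the key built in set()/get() above decides whether a cached connection is shared. A handler that declares thread_safe is stored once per integration (slot 0), while any other handler gets one cache entry per OS thread. A minimal standalone sketch of that keying rule (the helper name is illustrative, not part of the diff):

    import threading

    def cache_key(name: str, company_id: int, thread_safe: bool) -> tuple:
        # Mirrors the key built in HandlersCache.set()/get(): thread-safe
        # handlers share one entry, others are keyed by native thread id.
        return (name, company_id, 0 if thread_safe else threading.get_native_id())

    print(cache_key("my_pg", 1, thread_safe=True))   # ('my_pg', 1, 0)
    print(cache_key("my_pg", 1, thread_safe=False))  # ('my_pg', 1, <this thread's id>)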
@@ -163,50 +155,43 @@ class IntegrationController:
 
     def _add_integration_record(self, name, engine, connection_args):
         integration_record = db.Integration(
-            name=name,
-            engine=engine,
-            data=connection_args or {},
-            company_id=ctx.company_id
+            name=name, engine=engine, data=connection_args or {}, company_id=ctx.company_id
         )
         db.session.add(integration_record)
         db.session.commit()
         return integration_record.id
 
     def add(self, name, engine, connection_args):
-
         logger.debug(
             "%s: add method calling name=%s, engine=%s, connection_args=%s, company_id=%s",
-            self.__class__.__name__,
+            self.__class__.__name__,
+            name,
+            engine,
+            connection_args,
+            ctx.company_id,
         )
         handler_meta = self.get_handler_meta(engine)
 
-        accept_connection_args = handler_meta.get(
+        accept_connection_args = handler_meta.get("connection_args")
         logger.debug("%s: accept_connection_args - %s", self.__class__.__name__, accept_connection_args)
 
         files_dir = None
         if accept_connection_args is not None and connection_args is not None:
             for arg_name, arg_value in connection_args.items():
-                if
-                    arg_name in accept_connection_args
-                    and accept_connection_args[arg_name]['type'] == ARG_TYPE.PATH
-                ):
+                if arg_name in accept_connection_args and accept_connection_args[arg_name]["type"] == ARG_TYPE.PATH:
                     if files_dir is None:
-                        files_dir = tempfile.mkdtemp(prefix=
+                        files_dir = tempfile.mkdtemp(prefix="mindsdb_files_")
                     shutil.copy(arg_value, files_dir)
                     connection_args[arg_name] = Path(arg_value).name
 
         integration_id = self._add_integration_record(name, engine, connection_args)
 
         if files_dir is not None:
-            store = FileStorage(
-
-                resource_id=integration_id,
-                sync=False
-            )
-            store.add(files_dir, '')
+            store = FileStorage(resource_group=RESOURCE_GROUP.INTEGRATION, resource_id=integration_id, sync=False)
+            store.add(files_dir, "")
             store.push()
 
-        if handler_meta.get(
+        if handler_meta.get("type") == HANDLER_TYPE.ML:
             ml_handler = self.get_ml_handler(name)
             ml_handler.create_engine(connection_args, integration_id)
@@ -215,7 +200,7 @@ class IntegrationController:
     def modify(self, name, data):
         self.handlers_cache.delete(name)
         integration_record = self._get_integration_record(name)
-        if isinstance(integration_record.data, dict) and integration_record.data.get(
+        if isinstance(integration_record.data, dict) and integration_record.data.get("is_demo") is True:
             raise ValueError("It is forbidden to change properties of the demo object")
         old_data = deepcopy(integration_record.data)
         for k in old_data:
@@ -226,8 +211,8 @@ class IntegrationController:
         db.session.commit()
 
     def delete(self, name):
-        if name in (
-            raise Exception(
+        if name in ("files", "lightwood"):
+            raise Exception("Unable to drop: is system database")
 
         self.handlers_cache.delete(name)
@@ -235,32 +220,32 @@ class IntegrationController:
         if name in self.handler_modules:
             handler = self.handler_modules[name]
 
-            if getattr(handler,
-                raise Exception(
+            if getattr(handler, "permanent", False) is True:
+                raise Exception("Unable to drop permanent integration")
 
         integration_record = self._get_integration_record(name)
-        if isinstance(integration_record.data, dict) and integration_record.data.get(
-            raise Exception(
+        if isinstance(integration_record.data, dict) and integration_record.data.get("is_demo") is True:
+            raise Exception("Unable to drop demo object")
 
         # if this is ml engine
         engine_models = get_model_records(ml_handler_name=name, deleted_at=None)
         active_models = [m.name for m in engine_models if m.deleted_at is None]
         if len(active_models) > 0:
-            raise Exception(f
+            raise Exception(f"Unable to drop ml engine with active models: {active_models}")
 
         # check linked KBs
         kb = db.KnowledgeBase.query.filter_by(vector_database_id=integration_record.id).first()
         if kb is not None:
-            raise Exception(f
+            raise Exception(f"Unable to drop, integration is used by knowledge base: {kb.name}")
 
         # check linked predictors
         models = get_model_records()
         for model in models:
             if (
                 model.data_integration_ref is not None
-                and model.data_integration_ref.get(
-                and isinstance(model.data_integration_ref.get(
-                and model.data_integration_ref[
+                and model.data_integration_ref.get("type") == "integration"
+                and isinstance(model.data_integration_ref.get("id"), int)
+                and model.data_integration_ref["id"] == integration_record.id
             ):
                 model.data_integration_ref = None
@@ -269,6 +254,12 @@ class IntegrationController:
         if model.deleted_at is not None:
             model.integration_id = None
 
+        # Remove the integration metadata from the data catalog (if enabled).
+        # TODO: Can this be handled via cascading delete in the database?
+        if Config().get("data_catalog", {}).get("enabled", False):
+            data_catalog_reader = DataCatalogLoader(database_name=name)
+            data_catalog_reader.unload_metadata()
+
         db.session.delete(integration_record)
         db.session.commit()
 
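Note: this hunk, together with the new import in the first hunk and the new mindsdb/interfaces/data_catalog/ package in the file list above, wires the data catalog into integration deletion. The feature is gated behind config; a condensed sketch of the same gate (the function name is illustrative, not part of the diff):

    from mindsdb.utilities.config import Config
    from mindsdb.interfaces.data_catalog.data_catalog_loader import DataCatalogLoader

    def drop_catalog_metadata(database_name: str) -> None:
        # Same check as in IntegrationController.delete(): the catalog is only
        # touched when the data_catalog feature is enabled in the config.
        if Config().get("data_catalog", {}).get("enabled", False):
            DataCatalogLoader(database_name=database_name).unload_metadata()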
@@ -281,82 +272,77 @@ class IntegrationController:
             return None
         data = deepcopy(integration_record.data)
 
-        bundle_path = data.get(
-        mysql_ssl_ca = data.get(
-        mysql_ssl_cert = data.get(
-        mysql_ssl_key = data.get(
+        bundle_path = data.get("secure_connect_bundle")
+        mysql_ssl_ca = data.get("ssl_ca")
+        mysql_ssl_cert = data.get("ssl_cert")
+        mysql_ssl_key = data.get("ssl_key")
         if (
-            data.get(
+            data.get("type") in ("mysql", "mariadb")
             and (
                 self._is_not_empty_str(mysql_ssl_ca)
                 or self._is_not_empty_str(mysql_ssl_cert)
                 or self._is_not_empty_str(mysql_ssl_key)
             )
-            or data.get(
+            or data.get("type") in ("cassandra", "scylla")
             and bundle_path is not None
         ):
             fs_store = FsStore()
-            integrations_dir = Config()[
-            folder_name = f
-            fs_store.get(
-                folder_name,
-                base_dir=integrations_dir
-            )
+            integrations_dir = Config()["paths"]["integrations"]
+            folder_name = f"integration_files_{integration_record.company_id}_{integration_record.id}"
+            fs_store.get(folder_name, base_dir=integrations_dir)
 
         handler_meta = self.get_handler_metadata(integration_record.engine)
         integration_type = None
         if isinstance(handler_meta, dict):
             # in other cases, the handler directory is likely not exist.
-            integration_type = handler_meta.get(
+            integration_type = handler_meta.get("type")
 
         if show_secrets is False and handler_meta is not None:
-            connection_args = handler_meta.get(
+            connection_args = handler_meta.get("connection_args", None)
             if isinstance(connection_args, dict):
                 if integration_type == HANDLER_TYPE.DATA:
                     for key, value in connection_args.items():
-                        if key in data and value.get(
-                            data[key] =
+                        if key in data and value.get("secret", False) is True:
+                            data[key] = "******"
                 elif integration_type == HANDLER_TYPE.ML:
-                    creation_args = connection_args.get(
+                    creation_args = connection_args.get("creation_args")
                     if isinstance(creation_args, dict):
                         for key, value in creation_args.items():
-                            if key in data and value.get(
-                                data[key] =
+                            if key in data and value.get("secret", False) is True:
+                                data[key] = "******"
                 else:
-                    raise ValueError(f
+                    raise ValueError(f"Unexpected handler type: {integration_type}")
             else:
                 # region obsolete, del in future
-                if
-                    data[
+                if "password" in data:
+                    data["password"] = None
                 if (
-                    data.get(
-                    and isinstance(data.get(
-                    and
+                    data.get("type") == "redis"
+                    and isinstance(data.get("connection"), dict)
+                    and "password" in data["connection"]
                 ):
-                    data[
+                    data["connection"] = None
                 # endregion
 
         class_type, permanent = None, False
         if handler_meta is not None:
-            class_type = handler_meta.get(
-            permanent = handler_meta.get(
+            class_type = handler_meta.get("class_type")
+            permanent = handler_meta.get("permanent", False)
 
         return {
-
-
-
-
-
-
-
-
+            "id": integration_record.id,
+            "name": integration_record.name,
+            "type": integration_type,
+            "class_type": class_type,
+            "engine": integration_record.engine,
+            "permanent": permanent,
+            "date_last_update": deepcopy(integration_record.updated_at),  # to del ?
+            "connection_data": data,
         }
 
     def get_by_id(self, integration_id, show_secrets=True):
         integration_record = (
-            db.session.query(db.Integration)
-            .filter_by(company_id=ctx.company_id, id=integration_id)
-            .first()
+            db.session.query(db.Integration).filter_by(company_id=ctx.company_id, id=integration_id).first()
         )
         return self._get_integration_record_data(integration_record, show_secrets)
@@ -379,20 +365,21 @@ class IntegrationController:
             db.Integration
         """
         if case_sensitive:
-            integration_records = db.session.query(db.Integration).filter_by(
-                company_id=ctx.company_id,
-                name=name
-            ).all()
+            integration_records = db.session.query(db.Integration).filter_by(company_id=ctx.company_id, name=name).all()
             if len(integration_records) > 1:
                 raise Exception(f"There is {len(integration_records)} integrations with name '{name}'")
             if len(integration_records) == 0:
                 raise EntityNotExistsError(f"There is no integration with name '{name}'")
             integration_record = integration_records[0]
         else:
-            integration_record =
-            (db.Integration
-
-
+            integration_record = (
+                db.session.query(db.Integration)
+                .filter(
+                    (db.Integration.company_id == ctx.company_id)
+                    & (func.lower(db.Integration.name) == func.lower(name))
+                )
+                .first()
+            )
             if integration_record is None:
                 raise EntityNotExistsError(f"There is no integration with name '{name}'")
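Note: the case-insensitive branch now pushes the lowercasing into SQL via func.lower on both sides of the comparison. A self-contained demo of the same pattern (model and data here are made up for illustration, not MindsDB's schema):

    from sqlalchemy import Column, Integer, String, create_engine, func
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Integration(Base):
        __tablename__ = "integration"
        id = Column(Integer, primary_key=True)
        company_id = Column(Integer)
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(Integration(company_id=1, name="MyPostgres"))
        session.commit()
        record = (
            session.query(Integration)
            .filter(
                (Integration.company_id == 1)
                & (func.lower(Integration.name) == func.lower("mypostgres"))
            )
            .first()
        )
        print(record.name)  # MyPostgres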
@@ -407,21 +394,28 @@ class IntegrationController:
             integration_dict[record.name] = self._get_integration_record_data(record, show_secrets)
         return integration_dict
 
-    def _make_handler_args(
-
+    def _make_handler_args(
+        self,
+        name: str,
+        handler_type: str,
+        connection_data: dict,
+        integration_id: int = None,
+        file_storage: FileStorage = None,
+        handler_storage: HandlerStorage = None,
+    ):
         handler_args = dict(
             name=name,
             integration_id=integration_id,
             connection_data=connection_data,
             file_storage=file_storage,
-            handler_storage=handler_storage
+            handler_storage=handler_storage,
         )
 
-        if handler_type ==
-            handler_args[
+        if handler_type == "files":
+            handler_args["file_controller"] = FileController()
         elif self.handler_modules.get(handler_type, False).type == HANDLER_TYPE.ML:
-            handler_args[
-            handler_args[
+            handler_args["handler_controller"] = self
+            handler_args["company_id"] = ctx.company_id
 
         return handler_args
@@ -439,12 +433,9 @@ class IntegrationController:
         integration_id = int(time.time() * 10000)
 
         file_storage = FileStorage(
-            resource_group=RESOURCE_GROUP.INTEGRATION,
-            resource_id=integration_id,
-            root_dir='tmp',
-            sync=False
+            resource_group=RESOURCE_GROUP.INTEGRATION, resource_id=integration_id, root_dir="tmp", sync=False
         )
-        handler_storage = HandlerStorage(integration_id, root_dir=
+        handler_storage = HandlerStorage(integration_id, root_dir="tmp", is_temporal=True)
 
         handler_meta = self.get_handler_meta(engine)
         if handler_meta is None:
@@ -466,7 +457,7 @@ class IntegrationController:
 
     def copy_integration_storage(self, integration_id_from, integration_id_to):
         storage_from = HandlerStorage(integration_id_from)
-        root_path =
+        root_path = ""
 
         if storage_from.is_empty():
             return None
@@ -494,7 +485,7 @@ class IntegrationController:
         if integration_meta is None:
             raise Exception(f"Handler '{name}' does not exists")
 
-        if integration_meta.get(
+        if integration_meta.get("type") != HANDLER_TYPE.ML:
             raise Exception(f"Handler '{name}' must be ML type")
 
         logger.info(
@@ -504,7 +495,7 @@ class IntegrationController:
         handler = BaseMLEngineExec(
             name=integration_record.name,
             integration_id=integration_record.id,
-            handler_module=self.handler_modules[integration_engine]
+            handler_module=self.handler_modules[integration_engine],
         )
 
         return handler
@@ -532,35 +523,36 @@ class IntegrationController:
         if integration_meta is None:
             raise Exception(f"Handler '{name}' does not exist")
 
-        if integration_meta.get(
+        if integration_meta.get("type") != HANDLER_TYPE.DATA:
             raise Exception(f"Handler '{name}' must be DATA type")
 
         integration_data = self._get_integration_record_data(integration_record, True)
         if integration_data is None:
             raise Exception(f"Can't find integration_record for handler '{name}'")
-        connection_data = integration_data.get(
+        connection_data = integration_data.get("connection_data", {})
         logger.debug(
             "%s.get_handler: connection_data=%s, engine=%s",
             self.__class__.__name__,
-            connection_data,
+            connection_data,
+            integration_engine,
         )
 
         if integration_meta["import"]["success"] is False:
-            msg = dedent(f
+            msg = dedent(f"""\
                 Handler '{integration_engine}' cannot be used. Reason is:
-                {integration_meta[
-
-            is_cloud = Config().get(
+                {integration_meta["import"]["error_message"]}
+            """)
+            is_cloud = Config().get("cloud", False)
             if is_cloud is False:
-                msg += dedent(f
+                msg += dedent(f"""
 
                 If error is related to missing dependencies, then try to run command in shell and restart mindsdb:
                     pip install mindsdb[{integration_engine}]
-
+                """)
             logger.debug(msg)
             raise Exception(msg)
 
-        connection_args = integration_meta.get(
+        connection_args = integration_meta.get("connection_args")
         logger.debug("%s.get_handler: connection args - %s", self.__class__.__name__, connection_args)
 
         file_storage = FileStorage(
@@ -572,11 +564,11 @@ class IntegrationController:
 
         if isinstance(connection_args, (dict, OrderedDict)):
             files_to_get = {
-                arg_name: arg_value
-
+                arg_name: arg_value
+                for arg_name, arg_value in connection_data.items()
+                if arg_name in connection_args and connection_args.get(arg_name)["type"] == ARG_TYPE.PATH
             }
             if len(files_to_get) > 0:
-
                 for file_name, file_path in files_to_get.items():
                     connection_data[file_name] = file_storage.get_path(file_path)
@@ -584,9 +576,9 @@ class IntegrationController:
             name=name,
             handler_type=integration_engine,
             connection_data=connection_data,
-            integration_id=integration_data[
+            integration_id=integration_data["id"],
             file_storage=file_storage,
-            handler_storage=handler_storage
+            handler_storage=handler_storage,
         )
 
         HandlerClass = self.handler_modules[integration_engine].Handler
@@ -602,82 +594,76 @@ class IntegrationController:
             handler_meta = self._get_handler_meta(handler_name)
         except Exception as e:
             handler_meta = self.handlers_import_status[handler_name]
-            handler_meta[
-            handler_meta[
+            handler_meta["import"]["success"] = False
+            handler_meta["import"]["error_message"] = str(e)
 
         self.handlers_import_status[handler_name] = handler_meta
 
     def _read_dependencies(self, path):
         dependencies = []
-        requirements_txt = Path(path).joinpath(
+        requirements_txt = Path(path).joinpath("requirements.txt")
         if requirements_txt.is_file():
-            with open(str(requirements_txt),
-                dependencies = [x.strip(
+            with open(str(requirements_txt), "rt") as f:
+                dependencies = [x.strip(" \t\n") for x in f.readlines()]
             dependencies = [x for x in dependencies if len(x) > 0]
         return dependencies
 
     def _get_handler_meta(self, handler_name):
-
         module = self.handler_modules[handler_name]
 
         handler_dir = Path(module.__path__[0])
         handler_folder_name = handler_dir.name
 
-        import_error = getattr(module,
+        import_error = getattr(module, "import_error", None)
         handler_meta = self.handlers_import_status[handler_name]
-        handler_meta[
-        handler_meta[
-        handler_meta[
+        handler_meta["import"]["success"] = import_error is None
+        handler_meta["version"] = module.version
+        handler_meta["thread_safe"] = getattr(module, "thread_safe", False)
 
         if import_error is not None:
-            handler_meta[
+            handler_meta["import"]["error_message"] = str(import_error)
 
-        handler_type = getattr(module,
+        handler_type = getattr(module, "type", None)
         handler_class = None
-        if hasattr(module,
+        if hasattr(module, "Handler") and inspect.isclass(module.Handler):
             handler_class = module.Handler
             if issubclass(handler_class, BaseMLEngine):
-                handler_meta[
+                handler_meta["class_type"] = "ml"
             elif issubclass(handler_class, DatabaseHandler):
-                handler_meta[
+                handler_meta["class_type"] = "sql"
             if issubclass(handler_class, APIHandler):
-                handler_meta[
+                handler_meta["class_type"] = "api"
 
         if handler_type == HANDLER_TYPE.ML:
             # for ml engines, patch the connection_args from the argument probing
             if handler_class:
                 try:
                     prediction_args = handler_class.prediction_args()
-                    creation_args = getattr(module,
-                    connection_args = {
-
-                        "creation_args": creation_args
-                    }
-                    setattr(module, 'connection_args', connection_args)
+                    creation_args = getattr(module, "creation_args", handler_class.creation_args())
+                    connection_args = {"prediction": prediction_args, "creation_args": creation_args}
+                    setattr(module, "connection_args", connection_args)
                     logger.debug("Patched connection_args for %s", handler_folder_name)
                 except Exception as e:
                     # do nothing
                     logger.debug("Failed to patch connection_args for %s, reason: %s", handler_folder_name, str(e))
 
-        module_attrs = [
-
-
-
-
-            'title'
-        ] if hasattr(module, attr)]
+        module_attrs = [
+            attr
+            for attr in ["connection_args_example", "connection_args", "description", "type", "title"]
+            if hasattr(module, attr)
+        ]
 
         for attr in module_attrs:
             handler_meta[attr] = getattr(module, attr)
 
         # endregion
-        if hasattr(module,
-            handler_meta[
+        if hasattr(module, "permanent"):
+            handler_meta["permanent"] = module.permanent
         else:
-            if handler_meta.get(
-                handler_meta[
+            if handler_meta.get("name") in ("files", "views", "lightwood"):
+                handler_meta["permanent"] = True
             else:
-                handler_meta[
+                handler_meta["permanent"] = False
 
         return handler_meta
@@ -685,60 +671,60 @@ class IntegrationController:
         icon = {}
         try:
             icon_path = handler_dir.joinpath(icon_path)
-            icon_type = icon_path.name[icon_path.name.rfind(
+            icon_type = icon_path.name[icon_path.name.rfind(".") + 1 :].lower()
 
-            if icon_type ==
-                with open(str(icon_path),
-                    icon[
+            if icon_type == "svg":
+                with open(str(icon_path), "rt") as f:
+                    icon["data"] = f.read()
             else:
-                with open(str(icon_path),
-                    icon[
+                with open(str(icon_path), "rb") as f:
+                    icon["data"] = base64.b64encode(f.read()).decode("utf-8")
 
-            icon[
-            icon[
+            icon["name"] = icon_path.name
+            icon["type"] = icon_type
 
         except Exception as e:
-            logger.error(f
+            logger.error(f"Error reading icon for {handler_dir}, {e}!")
         return icon
 
     def _load_handler_modules(self):
-        mindsdb_path = Path(importlib.util.find_spec(
-        handlers_path = mindsdb_path.joinpath(
+        mindsdb_path = Path(importlib.util.find_spec("mindsdb").origin).parent
+        handlers_path = mindsdb_path.joinpath("integrations/handlers")
 
         # edge case: running from tests directory, find_spec finds the base folder instead of actual package
         if not os.path.isdir(handlers_path):
-            mindsdb_path = Path(importlib.util.find_spec(
-            handlers_path = mindsdb_path.joinpath(
+            mindsdb_path = Path(importlib.util.find_spec("mindsdb").origin).parent.joinpath("mindsdb")
+            handlers_path = mindsdb_path.joinpath("integrations/handlers")
 
         self.handler_modules = {}
         self.handlers_import_status = {}
         for handler_dir in handlers_path.iterdir():
-            if handler_dir.is_dir() is False or handler_dir.name.startswith(
+            if handler_dir.is_dir() is False or handler_dir.name.startswith("__"):
                 continue
 
             handler_info = self._get_handler_info(handler_dir)
-            if
+            if "name" not in handler_info:
                 continue
-            handler_name = handler_info[
+            handler_name = handler_info["name"]
             dependencies = self._read_dependencies(handler_dir)
             handler_meta = {
-
-
-
-
-
-
+                "path": handler_dir,
+                "import": {
+                    "success": None,
+                    "error_message": None,
+                    "folder": handler_dir.name,
+                    "dependencies": dependencies,
                 },
-
-
-
-
-
+                "name": handler_name,
+                "permanent": handler_info.get("permanent", False),
+                "connection_args": handler_info.get("connection_args", None),
+                "class_type": handler_info.get("class_type", None),
+                "type": handler_info.get("type"),
             }
-            if
-                icon = self._get_handler_icon(handler_dir, handler_info[
+            if "icon_path" in handler_info:
+                icon = self._get_handler_icon(handler_dir, handler_info["icon_path"])
                 if icon:
-                    handler_meta[
+                    handler_meta["icon"] = icon
             self.handlers_import_status[handler_name] = handler_meta
 
     def _get_connection_args(self, args_file: Path, param_name: str) -> dict:
@@ -758,7 +744,7 @@ class IntegrationController:
                 continue
             if not item.targets[0].id == param_name:
                 continue
-            if hasattr(item.value,
+            if hasattr(item.value, "keywords"):
                 for keyword in item.value.keywords:
                     name = keyword.arg
                     params = keyword.value
@@ -802,7 +788,7 @@ class IntegrationController:
         if module_file is None:
             return
 
-        path = handler_dir / f
+        path = handler_dir / f"{module_file}.py"
 
         if not path.exists():
             return
@@ -812,9 +798,9 @@ class IntegrationController:
         for item in code.body:
             if isinstance(item, ast.ClassDef):
                 bases = [base.id for base in item.bases]
-                if
-                    return
-        return
+                if "APIHandler" in bases:
+                    return "api"
+        return "sql"
 
     def _get_handler_info(self, handler_dir: Path) -> dict:
         """
@@ -825,7 +811,7 @@ class IntegrationController:
         - connection arguments
         """
 
-        init_file = handler_dir /
+        init_file = handler_dir / "__init__.py"
         if not init_file.exists():
             return {}
         code = ast.parse(init_file.read_text())
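Note: the handler scanner reads each handler's __init__.py with ast.parse and pulls top-level constant assignments without importing the module, so a handler with missing dependencies can still be listed. A self-contained sketch of that idea (helper name and sample source are illustrative, not part of the diff):

    import ast

    def module_constants(source: str) -> dict:
        # Collect top-level `name = <constant>` assignments, the way the
        # handler discovery code scans __init__.py without importing it.
        info = {}
        for item in ast.parse(source).body:
            if (
                isinstance(item, ast.Assign)
                and isinstance(item.targets[0], ast.Name)
                and isinstance(item.value, ast.Constant)
            ):
                info[item.targets[0].id] = item.value.value
        return info

    print(module_constants('title = "PostgreSQL"\nname = "postgres"\nversion = "2.0.1"'))
    # {'title': 'PostgreSQL', 'name': 'postgres', 'version': '2.0.1'}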
@@ -838,47 +824,47 @@ class IntegrationController:
                 name = item.targets[0].id
                 if isinstance(item.value, ast.Constant):
                     info[name] = item.value.value
-                if isinstance(item.value, ast.Attribute) and name ==
-                    if item.value.attr ==
+                if isinstance(item.value, ast.Attribute) and name == "type":
+                    if item.value.attr == "ML":
                         info[name] = HANDLER_TYPE.ML
-                        info[
+                        info["class_type"] = "ml"
                     else:
                         info[name] = HANDLER_TYPE.DATA
-                        info[
+                        info["class_type"] = self._get_base_class_type(code, handler_dir) or "sql"
 
         # connection args
-        if info[
-            args_file = handler_dir /
+        if info["type"] == HANDLER_TYPE.ML:
+            args_file = handler_dir / "creation_args.py"
             if args_file.exists():
-                info[
+                info["connection_args"] = {
                     "prediction": {},
-                    "creation_args": self._get_connection_args(args_file,
+                    "creation_args": self._get_connection_args(args_file, "creation_args"),
                 }
         else:
-            args_file = handler_dir /
+            args_file = handler_dir / "connection_args.py"
             if args_file.exists():
-                info[
+                info["connection_args"] = self._get_connection_args(args_file, "connection_args")
 
         return info
 
     def import_handler(self, handler_name: str, base_import: str = None):
         with self._import_lock:
             handler_meta = self.handlers_import_status[handler_name]
-            handler_dir = handler_meta[
+            handler_dir = handler_meta["path"]
 
             handler_folder_name = str(handler_dir.name)
             if base_import is None:
-                base_import =
+                base_import = "mindsdb.integrations.handlers."
 
             try:
-                handler_module = importlib.import_module(f
+                handler_module = importlib.import_module(f"{base_import}{handler_folder_name}")
                 self.handler_modules[handler_name] = handler_module
                 handler_meta = self._get_handler_meta(handler_name)
             except Exception as e:
-                handler_meta[
-                handler_meta[
+                handler_meta["import"]["success"] = False
+                handler_meta["import"]["error_message"] = str(e)
 
-            self.handlers_import_status[handler_meta[
+            self.handlers_import_status[handler_meta["name"]] = handler_meta
             return handler_meta
 
     def get_handlers_import_status(self):