MindsDB 25.9.2.0a1__py3-none-any.whl → 25.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of MindsDB might be problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/__main__.py +40 -29
- mindsdb/api/a2a/__init__.py +1 -1
- mindsdb/api/a2a/agent.py +16 -10
- mindsdb/api/a2a/common/server/server.py +7 -3
- mindsdb/api/a2a/common/server/task_manager.py +12 -5
- mindsdb/api/a2a/common/types.py +66 -0
- mindsdb/api/a2a/task_manager.py +65 -17
- mindsdb/api/common/middleware.py +10 -12
- mindsdb/api/executor/command_executor.py +51 -40
- mindsdb/api/executor/datahub/datanodes/datanode.py +2 -2
- mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py +7 -13
- mindsdb/api/executor/datahub/datanodes/integration_datanode.py +101 -49
- mindsdb/api/executor/datahub/datanodes/project_datanode.py +8 -4
- mindsdb/api/executor/datahub/datanodes/system_tables.py +3 -2
- mindsdb/api/executor/exceptions.py +29 -10
- mindsdb/api/executor/planner/plan_join.py +17 -3
- mindsdb/api/executor/planner/query_prepare.py +2 -20
- mindsdb/api/executor/sql_query/sql_query.py +74 -74
- mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +1 -2
- mindsdb/api/executor/sql_query/steps/subselect_step.py +0 -1
- mindsdb/api/executor/utilities/functions.py +6 -6
- mindsdb/api/executor/utilities/sql.py +37 -20
- mindsdb/api/http/gui.py +5 -11
- mindsdb/api/http/initialize.py +75 -61
- mindsdb/api/http/namespaces/agents.py +10 -15
- mindsdb/api/http/namespaces/analysis.py +13 -20
- mindsdb/api/http/namespaces/auth.py +1 -1
- mindsdb/api/http/namespaces/chatbots.py +0 -5
- mindsdb/api/http/namespaces/config.py +15 -11
- mindsdb/api/http/namespaces/databases.py +140 -201
- mindsdb/api/http/namespaces/file.py +17 -4
- mindsdb/api/http/namespaces/handlers.py +17 -7
- mindsdb/api/http/namespaces/knowledge_bases.py +28 -7
- mindsdb/api/http/namespaces/models.py +94 -126
- mindsdb/api/http/namespaces/projects.py +13 -22
- mindsdb/api/http/namespaces/sql.py +33 -25
- mindsdb/api/http/namespaces/tab.py +27 -37
- mindsdb/api/http/namespaces/views.py +1 -1
- mindsdb/api/http/start.py +16 -10
- mindsdb/api/mcp/__init__.py +2 -1
- mindsdb/api/mysql/mysql_proxy/executor/mysql_executor.py +15 -20
- mindsdb/api/mysql/mysql_proxy/mysql_proxy.py +26 -50
- mindsdb/api/mysql/mysql_proxy/utilities/__init__.py +0 -1
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +8 -2
- mindsdb/integrations/handlers/byom_handler/byom_handler.py +165 -190
- mindsdb/integrations/handlers/databricks_handler/databricks_handler.py +98 -46
- mindsdb/integrations/handlers/druid_handler/druid_handler.py +32 -40
- mindsdb/integrations/handlers/file_handler/file_handler.py +7 -0
- mindsdb/integrations/handlers/gitlab_handler/gitlab_handler.py +5 -2
- mindsdb/integrations/handlers/lightwood_handler/functions.py +45 -79
- mindsdb/integrations/handlers/mssql_handler/mssql_handler.py +438 -100
- mindsdb/integrations/handlers/mssql_handler/requirements_odbc.txt +3 -0
- mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +235 -3
- mindsdb/integrations/handlers/oracle_handler/__init__.py +2 -0
- mindsdb/integrations/handlers/oracle_handler/connection_args.py +7 -1
- mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +321 -16
- mindsdb/integrations/handlers/oracle_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +14 -2
- mindsdb/integrations/handlers/shopify_handler/requirements.txt +1 -0
- mindsdb/integrations/handlers/shopify_handler/shopify_handler.py +80 -13
- mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +2 -1
- mindsdb/integrations/handlers/statsforecast_handler/requirements.txt +1 -0
- mindsdb/integrations/handlers/statsforecast_handler/requirements_extra.txt +1 -0
- mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py +4 -4
- mindsdb/integrations/handlers/zendesk_handler/zendesk_tables.py +144 -111
- mindsdb/integrations/libs/api_handler.py +10 -10
- mindsdb/integrations/libs/base.py +4 -4
- mindsdb/integrations/libs/llm/utils.py +2 -2
- mindsdb/integrations/libs/ml_handler_process/create_engine_process.py +4 -7
- mindsdb/integrations/libs/ml_handler_process/func_call_process.py +2 -7
- mindsdb/integrations/libs/ml_handler_process/learn_process.py +37 -47
- mindsdb/integrations/libs/ml_handler_process/update_engine_process.py +4 -7
- mindsdb/integrations/libs/ml_handler_process/update_process.py +2 -7
- mindsdb/integrations/libs/process_cache.py +132 -140
- mindsdb/integrations/libs/response.py +18 -12
- mindsdb/integrations/libs/vectordatabase_handler.py +26 -0
- mindsdb/integrations/utilities/files/file_reader.py +6 -7
- mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/__init__.py +1 -0
- mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/snowflake_jwt_gen.py +151 -0
- mindsdb/integrations/utilities/rag/config_loader.py +37 -26
- mindsdb/integrations/utilities/rag/rerankers/base_reranker.py +83 -30
- mindsdb/integrations/utilities/rag/rerankers/reranker_compressor.py +4 -4
- mindsdb/integrations/utilities/rag/retrievers/sql_retriever.py +55 -133
- mindsdb/integrations/utilities/rag/settings.py +58 -133
- mindsdb/integrations/utilities/rag/splitters/file_splitter.py +5 -15
- mindsdb/interfaces/agents/agents_controller.py +2 -3
- mindsdb/interfaces/agents/constants.py +0 -2
- mindsdb/interfaces/agents/litellm_server.py +34 -58
- mindsdb/interfaces/agents/mcp_client_agent.py +10 -10
- mindsdb/interfaces/agents/mindsdb_database_agent.py +5 -5
- mindsdb/interfaces/agents/run_mcp_agent.py +12 -21
- mindsdb/interfaces/chatbot/chatbot_task.py +20 -23
- mindsdb/interfaces/chatbot/polling.py +30 -18
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +16 -17
- mindsdb/interfaces/data_catalog/data_catalog_reader.py +15 -4
- mindsdb/interfaces/database/data_handlers_cache.py +190 -0
- mindsdb/interfaces/database/database.py +3 -3
- mindsdb/interfaces/database/integrations.py +7 -110
- mindsdb/interfaces/database/projects.py +2 -6
- mindsdb/interfaces/database/views.py +1 -4
- mindsdb/interfaces/file/file_controller.py +6 -6
- mindsdb/interfaces/functions/controller.py +1 -1
- mindsdb/interfaces/functions/to_markdown.py +2 -2
- mindsdb/interfaces/jobs/jobs_controller.py +5 -9
- mindsdb/interfaces/jobs/scheduler.py +3 -9
- mindsdb/interfaces/knowledge_base/controller.py +244 -128
- mindsdb/interfaces/knowledge_base/evaluate.py +36 -41
- mindsdb/interfaces/knowledge_base/executor.py +11 -0
- mindsdb/interfaces/knowledge_base/llm_client.py +51 -17
- mindsdb/interfaces/knowledge_base/preprocessing/json_chunker.py +40 -61
- mindsdb/interfaces/model/model_controller.py +172 -168
- mindsdb/interfaces/query_context/context_controller.py +14 -2
- mindsdb/interfaces/skills/custom/text2sql/mindsdb_sql_toolkit.py +10 -14
- mindsdb/interfaces/skills/retrieval_tool.py +43 -50
- mindsdb/interfaces/skills/skill_tool.py +2 -2
- mindsdb/interfaces/skills/skills_controller.py +1 -4
- mindsdb/interfaces/skills/sql_agent.py +25 -19
- mindsdb/interfaces/storage/db.py +16 -6
- mindsdb/interfaces/storage/fs.py +114 -169
- mindsdb/interfaces/storage/json.py +19 -18
- mindsdb/interfaces/tabs/tabs_controller.py +49 -72
- mindsdb/interfaces/tasks/task_monitor.py +3 -9
- mindsdb/interfaces/tasks/task_thread.py +7 -9
- mindsdb/interfaces/triggers/trigger_task.py +7 -13
- mindsdb/interfaces/triggers/triggers_controller.py +47 -52
- mindsdb/migrations/migrate.py +16 -16
- mindsdb/utilities/api_status.py +58 -0
- mindsdb/utilities/config.py +68 -2
- mindsdb/utilities/exception.py +40 -1
- mindsdb/utilities/fs.py +0 -1
- mindsdb/utilities/hooks/profiling.py +17 -14
- mindsdb/utilities/json_encoder.py +24 -10
- mindsdb/utilities/langfuse.py +40 -45
- mindsdb/utilities/log.py +272 -0
- mindsdb/utilities/ml_task_queue/consumer.py +52 -58
- mindsdb/utilities/ml_task_queue/producer.py +26 -30
- mindsdb/utilities/render/sqlalchemy_render.py +22 -20
- mindsdb/utilities/starters.py +0 -10
- mindsdb/utilities/utils.py +2 -2
- {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/METADATA +286 -267
- {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/RECORD +145 -159
- mindsdb/api/mysql/mysql_proxy/utilities/exceptions.py +0 -14
- mindsdb/api/postgres/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/executor/__init__.py +0 -1
- mindsdb/api/postgres/postgres_proxy/executor/executor.py +0 -189
- mindsdb/api/postgres/postgres_proxy/postgres_packets/__init__.py +0 -0
- mindsdb/api/postgres/postgres_proxy/postgres_packets/errors.py +0 -322
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_fields.py +0 -34
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message.py +0 -31
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_formats.py +0 -1265
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_identifiers.py +0 -31
- mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_packets.py +0 -253
- mindsdb/api/postgres/postgres_proxy/postgres_proxy.py +0 -477
- mindsdb/api/postgres/postgres_proxy/utilities/__init__.py +0 -10
- mindsdb/api/postgres/start.py +0 -11
- mindsdb/integrations/handlers/mssql_handler/tests/__init__.py +0 -0
- mindsdb/integrations/handlers/mssql_handler/tests/test_mssql_handler.py +0 -169
- mindsdb/integrations/handlers/oracle_handler/tests/__init__.py +0 -0
- mindsdb/integrations/handlers/oracle_handler/tests/test_oracle_handler.py +0 -32
- {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/top_level.txt +0 -0

mindsdb/integrations/handlers/oracle_handler/oracle_handler.py

@@ -1,11 +1,11 @@
-from typing import
+from typing import Any, Dict, List, Optional, Text
 
 import oracledb
 import pandas as pd
 from oracledb import connect, Connection, DatabaseError, Cursor
 from mindsdb_sql_parser.ast.base import ASTNode
 
-from mindsdb.integrations.libs.base import
+from mindsdb.integrations.libs.base import MetaDatabaseHandler
 from mindsdb.integrations.libs.response import (
     HandlerStatusResponse as StatusResponse,
     HandlerResponse as Response,
@@ -13,6 +13,7 @@ from mindsdb.integrations.libs.response import (
 )
 from mindsdb.utilities import log
 from mindsdb.utilities.render.sqlalchemy_render import SqlalchemyRender
+import mindsdb.utilities.profiler as profiler
 from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE
 
 
@@ -40,7 +41,6 @@ def _map_type(internal_type_name: str) -> MYSQL_DATA_TYPE:
         "NATIONAL CHARACTER",
         "NATIONAL CHAR",
         "VARCHAR",
-        "NCHAR",
         "NATIONAL CHARACTER VARYING",
         "NATIONAL CHAR VARYING",
         "NCHAR VARYING",
@@ -53,8 +53,18 @@ def _map_type(internal_type_name: str) -> MYSQL_DATA_TYPE:
         ("BINARY_DOUBLE",): MYSQL_DATA_TYPE.DOUBLE,
         ("LONG",): MYSQL_DATA_TYPE.BIGINT,
         ("DATE",): MYSQL_DATA_TYPE.DATE,
-        (
-
+        (
+            "HOUR",
+            "MINUTE",
+            "SECOND",
+            "TIMEZONE_HOUR",
+            "TIMEZONE_MINUTE",
+        ): MYSQL_DATA_TYPE.SMALLINT,
+        (
+            "TIMESTAMP",
+            "TIMESTAMP WITH TIME ZONE",
+            "TIMESTAMP WITH LOCAL TIME ZONE",
+        ): MYSQL_DATA_TYPE.TIMESTAMP,
         ("RAW", "LONG RAW", "BLOB", "BFILE"): MYSQL_DATA_TYPE.BINARY,
         ("ROWID", "UROWID"): MYSQL_DATA_TYPE.TEXT,
         ("CHAR", "NCHAR", "CLOB", "NCLOB", "CHARACTER"): MYSQL_DATA_TYPE.CHAR,
@@ -147,7 +157,7 @@ def _make_table_response(result: list[tuple[Any]], cursor: Cursor) -> Response:
     return Response(RESPONSE_TYPE.TABLE, data_frame=df, mysql_types=mysql_types)
 
 
-class OracleHandler(
+class OracleHandler(MetaDatabaseHandler):
     """
     This handler handles connection and execution of SQL queries on Oracle.
     """
@@ -160,7 +170,7 @@ class OracleHandler(DatabaseHandler):
 
         Args:
             name (Text): The name of the handler instance.
-            connection_data (Dict): The connection data required to connect to
+            connection_data (Dict): The connection data required to connect to OracleDB.
             kwargs: Arbitrary keyword arguments.
         """
         super().__init__(name)
@@ -188,7 +198,16 @@ class OracleHandler(DatabaseHandler):
             raise ValueError("Required parameters (user, password) must be provided.")
 
         if self.connection_data.get("thick_mode", False):
-
+            oracle_client_lib_dir = self.connection_data.get("oracle_client_lib_dir")
+            if isinstance(oracle_client_lib_dir, str) and oracle_client_lib_dir.strip():
+                try:
+                    oracledb.init_oracle_client(lib_dir=oracle_client_lib_dir)
+                except Exception as e:
+                    raise ValueError(f"Failed to initialize Oracle client: {e}")
+            else:
+                raise ValueError(
+                    "Parameter 'oracle_client_lib_dir' must be provided as a non-empty string when using thick_mode."
+                )
 
         config = {
             "user": self.connection_data["user"],
@@ -240,7 +259,7 @@ class OracleHandler(DatabaseHandler):
             raise
 
         except Exception as unknown_error:
-            logger.error(f"Unknown error when connecting to
+            logger.error(f"Unknown error when connecting to Oracle: {unknown_error}")
             raise
 
         self.is_connected = True
@@ -285,6 +304,7 @@ class OracleHandler(DatabaseHandler):
 
         return response
 
+    @profiler.profile()
     def native_query(self, query: Text) -> Response:
         """
         Executes a SQL query on the Oracle database and returns the result.
@@ -325,9 +345,69 @@ class OracleHandler(DatabaseHandler):
 
         if need_to_close is True:
             self.disconnect()
-
         return response
 
+    def query_stream(self, query: ASTNode, fetch_size: int = 1000):
+        """
+        Executes a SQL query represented by an ASTNode and retrieves the data in a streaming fashion.
+
+        Args:
+            query (ASTNode): An ASTNode representing the SQL query to be executed.
+            fetch_size (int): The number of rows to fetch in each batch.
+        Yields:
+            pd.DataFrame: A DataFrame containing a batch of rows from the query result.
+            Response: In case of an error, yields a Response object with the error details.
+        """
+        query_str = SqlalchemyRender("oracle").get_string(query, with_failback=True)
+        need_to_close = self.is_connected is False
+
+        connection = self.connect()
+        with connection.cursor() as cur:
+            try:
+                cur.execute(query_str)
+                while True:
+                    result = cur.fetchmany(fetch_size)
+                    if not result:
+                        break
+                    df = pd.DataFrame(result, columns=[col[0] for col in cur.description])
+                    yield df
+                connection.commit()
+            finally:
+                connect
+                if need_to_close is True:
+                    self.disconnect()
+
+    def insert(self, table_name: str, df: pd.DataFrame) -> Response:
+        """
+        Inserts data from a DataFrame into a specified table in the Oracle database.
+
+        Args:
+            table_name (str): The name of the table where the data will be inserted.
+            df (pd.DataFrame): The DataFrame containing the data to be inserted.
+        Returns:
+            Response: A response object indicating the success or failure of the insert operation.
+        """
+        need_to_close = self.is_connected is False
+        connection = self.connect()
+        columns = list(df.columns)
+        placeholders = ", ".join([f":{i + 1}" for i in range(len(columns))])
+        insert_query = f"INSERT INTO {table_name} ({', '.join(columns)}) VALUES ({placeholders})"
+
+        with connection.cursor() as cur:
+            try:
+                cur.executemany(insert_query, df.values.tolist())
+                connection.commit()
+                rowcount = cur.rowcount
+            except DatabaseError as database_error:
+                logger.error(f"Error inserting data into table {table_name} on Oracle, {database_error}!")
+                connection.rollback()
+                raise
+        if need_to_close is True:
+            self.disconnect()
+
+        return Response(RESPONSE_TYPE.OK, affected_rows=rowcount)
+
+    @profiler.profile()
     def query(self, query: ASTNode) -> Response:
         """
         Executes a SQL query represented by an ASTNode and retrieves the data.
@@ -349,12 +429,29 @@ class OracleHandler(DatabaseHandler):
         Returns:
             Response: A response object containing the list of tables and views, formatted as per the `Response` class.
         """
-        # TODO: This query does not seem to be correct.
         query = """
-            SELECT
-
-
-
+            SELECT
+                owner AS table_schema,
+                table_name AS table_name,
+                'BASE TABLE' AS table_type
+            FROM all_tables t
+            JOIN all_users u ON t.owner = u.username
+            WHERE t.tablespace_name = 'USERS'
+
+            UNION ALL
+
+            SELECT
+                v.owner AS table_schema,
+                v.view_name AS table_name,
+                'VIEW' AS table_type
+            FROM all_views v
+            JOIN all_users u ON v.owner = u.username
+            WHERE v.owner IN (
+                SELECT DISTINCT owner
+                FROM all_tables
+                WHERE tablespace_name = 'USERS'
+            )
+        """
         return self.native_query(query)
 
     def get_columns(self, table_name: Text) -> Response:
@@ -385,9 +482,217 @@ class OracleHandler(DatabaseHandler):
                 NULL AS COLLATION_NAME
             FROM USER_TAB_COLUMNS
             WHERE table_name = '{table_name}'
-            ORDER BY TABLE_NAME, COLUMN_ID
+            ORDER BY TABLE_NAME, COLUMN_ID
         """
         result = self.native_query(query)
         if result.resp_type is RESPONSE_TYPE.TABLE:
             result.to_columns_table_response(map_type_fn=_map_type)
         return result
+
+    def meta_get_tables(self, table_names: Optional[List[str]]) -> Response:
+        """
+        Retrieves metadata about all non-system tables and views in the current schema of the Oracle database.
+
+        Returns:
+            list[dict[str, Any]]: A list of dictionaries, each containing metadata about a table or view.
+        """
+        query = """
+            SELECT
+                o.object_name AS table_name,
+                USER AS table_schema,
+                o.object_type AS table_type,
+                c.comments AS table_description,
+                t.num_rows AS row_count
+            FROM
+                user_objects o
+            LEFT JOIN
+                user_tab_comments c ON o.object_name = c.table_name
+            LEFT JOIN
+                user_tables t ON o.object_name = t.table_name AND o.object_type = 'TABLE'
+            WHERE
+                o.object_type IN ('TABLE', 'VIEW')
+        """
+        if table_names is not None and len(table_names) > 0:
+            table_names = [f"'{t.upper()}'" for t in table_names]
+            query += f" AND o.object_name IN ({','.join(table_names)})"
+
+        query += " ORDER BY o.object_name"
+
+        result = self.native_query(query)
+        return result
+
+    def meta_get_columns(self, table_names: Optional[List[str]]) -> Response:
+        """Retrieves metadata about the columns of specified tables in the Oracle database.
+
+        Args:
+            table_names (list[str]): A list of table names for which to retrieve column metadata.
+
+        Returns:
+            list[dict[str, Any]]: A list of dictionaries, each containing metadata about a column.
+        """
+        query = """
+            SELECT
+                utc.table_name,
+                utc.column_name,
+                utc.data_type,
+                ucc.comments AS column_description,
+                utc.data_default AS column_default,
+                CASE
+                    WHEN utc.nullable = 'Y' THEN 1
+                    ELSE 0
+                END AS is_nullable
+            FROM
+                user_tab_columns utc
+            JOIN
+                user_tables ut ON utc.table_name = ut.table_name
+            LEFT JOIN
+                user_col_comments ucc ON utc.table_name = ucc.table_name AND utc.column_name = ucc.column_name
+        """
+        if table_names is not None and len(table_names) > 0:
+            table_names = [f"'{t.upper()}'" for t in table_names]
+            query += f" WHERE utc.table_name IN ({','.join(table_names)})"
+        query += " ORDER BY utc.table_name, utc.column_id"
+        result = self.native_query(query)
+        return result
+
+    def meta_get_column_statistics(self, table_names: Optional[List[str]]) -> Response:
+        """Retrieves statistics about the columns of specified tables in the Oracle database.
+
+        Args:
+            table_names (list[str]): A list of table names for which to retrieve column statistics.
+
+        Returns:
+            list[dict[str, Any]]: A list of dictionaries, each containing statistics about a column.
+        """
+        table_filter = ""
+        if table_names is not None and len(table_names) > 0:
+            quoted_names = [f"'{t.upper()}'" for t in table_names]
+            table_filter = f" WHERE cs.table_name IN ({','.join(quoted_names)})"
+
+        query = (
+            """
+            SELECT
+                cs.table_name AS TABLE_NAME,
+                cs.column_name AS COLUMN_NAME,
+                CASE
+                    WHEN cs.sample_size > 0 THEN ROUND((cs.num_nulls / cs.sample_size) * 100, 2)
+                    ELSE NULL
+                END AS NULL_PERCENTAGE,
+                cs.num_distinct AS DISTINCT_VALUES_COUNT,
+                NULL AS MOST_COMMON_VALUES,
+                NULL AS MOST_COMMON_FREQUENCIES,
+                cs.histogram AS HISTOGRAM_TYPE,
+                h.bounds AS HISTOGRAM_BOUNDS
+            FROM
+                user_tab_col_statistics cs
+            LEFT JOIN (
+                SELECT
+                    table_name,
+                    column_name,
+                    LISTAGG(endpoint_value, ', ') WITHIN GROUP (ORDER BY endpoint_number) AS bounds
+                FROM
+                    user_tab_histograms
+                GROUP BY
+                    table_name,
+                    column_name
+            ) h ON cs.table_name = h.table_name AND cs.column_name = h.column_name
+            """
+            + table_filter
+            + """
+            ORDER BY
+                cs.table_name,
+                cs.column_name
+            """
+        )
+
+        result = self.native_query(query)
+
+        if result.resp_type is RESPONSE_TYPE.TABLE and result.data_frame is not None:
+            df = result.data_frame
+
+            def extract_min_max(
+                histogram_str: str,
+            ) -> tuple[Optional[float], Optional[float]]:
+                if histogram_str and str(histogram_str).lower() not in ["nan", "none"]:
+                    values = str(histogram_str).split(",")
+                    if values:
+                        min_val = values[0].strip(" '\"")
+                        max_val = values[-1].strip(" '\"")
+                        return min_val, max_val
+                return None, None
+
+            min_max_values = df["HISTOGRAM_BOUNDS"].apply(extract_min_max)
+            df["MINIMUM_VALUE"] = min_max_values.apply(lambda x: x[0])
+            df["MAXIMUM_VALUE"] = min_max_values.apply(lambda x: x[1])
+            df.drop(columns=["HISTOGRAM_BOUNDS"], inplace=True)
+        return result
+
+    def meta_get_primary_keys(self, table_names: Optional[List[str]]) -> Response:
+        """
+        Retrieves the primary keys for the specified tables in the Oracle database.
+
+        Args:
+            table_names (list[str]): A list of table names for which to retrieve primary keys.
+
+        Returns:
+            list[dict[str, Any]]: A list of dictionaries, each containing information about a primary key.
+        """
+
+        query = """
+            SELECT
+                cols.table_name,
+                cols.column_name,
+                cols.position AS ordinal_position,
+                cons.constraint_name
+            FROM
+                all_constraints cons
+            JOIN
+                all_cons_columns cols ON cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner
+            WHERE
+                cons.constraint_type = 'P'
+                AND cons.owner = SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA')
+        """
+        if table_names is not None and len(table_names) > 0:
+            quoted_names = [f"'{t.upper()}'" for t in table_names]
+            query += f" AND cols.table_name IN ({','.join(quoted_names)})"
+
+        query += " ORDER BY cols.table_name, cols.position"
+
+        result = self.native_query(query)
+        return result
+
+    def meta_get_foreign_keys(self, table_names: Optional[List[str]]) -> Response:
+        """
+        Retrieves the foreign keys for the specified tables in the Oracle database.
+
+        Args:
+            table_names (list[str]): A list of table names for which to retrieve foreign keys.
+
+        Returns:
+            list[dict[str, Any]]: A list of dictionaries, each containing information about a foreign key.
+        """
+
+        query = """
+            SELECT
+                pk_cols.table_name AS parent_table_name,
+                pk_cols.column_name AS parent_column_name,
+                fk_cols.table_name AS child_table_name,
+                fk_cols.column_name AS child_column_name,
+                fk_cons.constraint_name
+            FROM
+                all_constraints fk_cons
+            JOIN
+                all_cons_columns fk_cols ON fk_cons.owner = fk_cols.owner AND fk_cons.constraint_name = fk_cols.constraint_name
+            JOIN
+                all_cons_columns pk_cols ON fk_cons.owner = pk_cols.owner AND fk_cons.r_constraint_name = pk_cols.constraint_name
+            WHERE
+                fk_cons.constraint_type = 'R'
+                AND fk_cons.owner = SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA')
+        """
+        if table_names is not None and len(table_names) > 0:
+            quoted_names = [f"'{t.upper()}'" for t in table_names]
+            query += f" AND fk_cols.table_name IN ({','.join(quoted_names)})"
+
+        query += " ORDER BY fk_cols.table_name, fk_cols.position"
+        result = self.native_query(query)
+        return result
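The Oracle handler changes above add thick-mode client initialization, a streaming query_stream generator, a bulk insert, and the meta_get_* catalog methods. Below is a minimal, hypothetical sketch of how the new pieces might be exercised directly; the connection keys other than thick_mode and oracle_client_lib_dir (for example "dsn") and the exact constructor keyword are assumptions, not taken from this diff.

from mindsdb_sql_parser import parse_sql
from mindsdb.integrations.handlers.oracle_handler.oracle_handler import OracleHandler

# Hypothetical connection data; thick_mode and oracle_client_lib_dir are the
# parameters consumed by the new connect() logic shown in the diff above.
connection_data = {
    "user": "admin",
    "password": "secret",
    "dsn": "localhost/FREEPDB1",                    # assumption: DSN-style target
    "thick_mode": True,                             # triggers oracledb.init_oracle_client(...)
    "oracle_client_lib_dir": "/opt/oracle/instantclient",
}

handler = OracleHandler("my_oracle", connection_data=connection_data)

# Stream a large result set in fetch_size-row batches instead of materializing it at once.
query_ast = parse_sql("SELECT * FROM big_table")
for batch_df in handler.query_stream(query_ast, fetch_size=1000):
    print(len(batch_df))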
mindsdb/integrations/handlers/oracle_handler/requirements.txt

@@ -1 +1 @@
-oracledb==
+oracledb==3.3.0
mindsdb/integrations/handlers/postgres_handler/postgres_handler.py

@@ -1,5 +1,6 @@
 import time
 import json
+import logging
 from typing import Optional, Any
 
 import pandas as pd
@@ -146,7 +147,7 @@ class PostgresHandler(MetaDatabaseHandler):
 
         self.connection = None
         self.is_connected = False
-        self.thread_safe =
+        self.thread_safe = True
 
     def __del__(self):
         if self.is_connected:
@@ -304,8 +305,19 @@ class PostgresHandler(MetaDatabaseHandler):
                 result = cur.fetchall()
                 response = _make_table_response(result, cur)
                 connection.commit()
+            except (psycopg.ProgrammingError, psycopg.DataError) as e:
+                # These is 'expected' exceptions, they should not be treated as mindsdb's errors
+                # ProgrammingError: table not found or already exists, syntax error, etc
+                # DataError: division by zero, numeric value out of range, etc.
+                # https://www.psycopg.org/psycopg3/docs/api/errors.html
+                log_message = "Database query failed with error, likely due to invalid SQL query"
+                if logger.isEnabledFor(logging.DEBUG):
+                    log_message += f". Executed query:\n{query}"
+                logger.info(log_message)
+                response = Response(RESPONSE_TYPE.ERROR, error_code=0, error_message=str(e), is_expected_error=True)
+                connection.rollback()
             except Exception as e:
-                logger.error(f"Error running query
+                logger.error(f"Error running query:\n{query}\non {self.database}, {e}")
                 response = Response(RESPONSE_TYPE.ERROR, error_code=0, error_message=str(e))
                 connection.rollback()
 
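The Postgres change above classifies psycopg.ProgrammingError and psycopg.DataError as expected, user-facing query errors and marks the error response with is_expected_error=True. A hedged sketch of how a caller might consume that flag; the caller itself is not part of this diff, and "handler" is assumed to be an already-connected PostgresHandler instance.

from mindsdb.integrations.libs.response import RESPONSE_TYPE

response = handler.native_query("SELECT 1/0")  # psycopg.DataError: division by zero
if response.resp_type == RESPONSE_TYPE.ERROR:
    if getattr(response, "is_expected_error", False):
        # A problem with the user's SQL: surface the message without an internal-error alarm.
        print(f"Query error: {response.error_message}")
    else:
        # Unexpected failure: escalate as an internal error.
        raise RuntimeError(response.error_message)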
mindsdb/integrations/handlers/shopify_handler/shopify_handler.py

@@ -1,7 +1,20 @@
 import shopify
 import requests
-
-
+import json
+
+from pyactiveresource.connection import ClientError, ServerError, ConnectionError as ResourceConnectionError
+
+from mindsdb.integrations.handlers.shopify_handler.shopify_tables import (
+    ProductsTable,
+    CustomersTable,
+    OrdersTable,
+    InventoryLevelTable,
+    LocationTable,
+    CustomerReviews,
+    CarrierServiceTable,
+    ShippingZoneTable,
+    SalesChannelTable,
+)
 from mindsdb.integrations.libs.api_handler import APIHandler
 from mindsdb.integrations.libs.response import (
     HandlerStatusResponse as StatusResponse,
@@ -9,7 +22,11 @@ from mindsdb.integrations.libs.response import (
 
 from mindsdb.utilities import log
 from mindsdb_sql_parser import parse_sql
-from mindsdb.integrations.libs.api_handler_exceptions import
+from mindsdb.integrations.libs.api_handler_exceptions import (
+    InvalidNativeQuery,
+    ConnectionFailed,
+    MissingConnectionParams,
+)
 
 logger = log.getLogger(__name__)
 
@@ -19,7 +36,7 @@ class ShopifyHandler(APIHandler):
     The Shopify handler implementation.
     """
 
-    name =
+    name = "shopify"
 
     def __init__(self, name: str, **kwargs):
         """
@@ -81,10 +98,14 @@ class ShopifyHandler(APIHandler):
         if self.kwargs.get("connection_data") is None:
             raise MissingConnectionParams("Incomplete parameters passed to Shopify Handler")
 
-        api_session = shopify.Session(
+        api_session = shopify.Session(
+            self.connection_data["shop_url"].strip(), "2021-10", self.connection_data["access_token"]
+        )
 
-        self.yotpo_app_key = self.connection_data[
-        self.yotpo_access_token =
+        self.yotpo_app_key = self.connection_data["yotpo_app_key"] if "yotpo_app_key" in self.connection_data else None
+        self.yotpo_access_token = (
+            self.connection_data["yotpo_access_token"] if "yotpo_access_token" in self.connection_data else None
+        )
 
         self.connection = api_session
 
@@ -106,17 +127,63 @@ class ShopifyHandler(APIHandler):
             shopify.ShopifyResource.activate_session(api_session)
             shopify.Shop.current()
             response.success = True
+        except ClientError as e:
+            # Handle Shopify API client errors (4xx responses)
+            logger.error(f"Error connecting to Shopify: {str(e)}")
+            response.error_message = str(e)
+
+            status_code = e.response.code if hasattr(e.response, "code") else None
+
+            # Try to parse error message from response body
+            error_detail = None
+            if hasattr(e.response, "body"):
+                try:
+                    body = json.loads(e.response.body)
+                    error_detail = body.get("errors", None)
+                except (json.JSONDecodeError, AttributeError):
+                    pass
+
+            if status_code == 402:
+                if error_detail and "Unavailable Shop" in str(error_detail):
+                    raise ConnectionFailed(
+                        "Shopify shop is unavailable. This could be due to shop suspension, billing issues, or incorrect shop URL."
+                    )
+                else:
+                    raise ConnectionFailed(
+                        "Shopify API access requires payment. Please check your Shopify billing status."
+                    )
+            elif status_code == 401:
+                raise ConnectionFailed("Invalid Shopify API credentials. Please check your access token and shop URL.")
+            elif status_code == 404:
+                raise ConnectionFailed("Shopify shop not found. Please verify the shop URL is correct.")
+            elif status_code == 403:
+                raise ConnectionFailed("Access denied. Please check your API permissions and credentials.")
+            else:
+                if error_detail:
+                    raise ConnectionFailed(f"Shopify API error: {error_detail}")
+                else:
+                    raise ConnectionFailed(
+                        "Failed to connect to Shopify API. Please check your credentials and shop URL."
+                    )
+        except ServerError as e:
+            # Handle Shopify API server errors (5xx responses)
+            logger.error(f"Shopify server error: {str(e)}")
+            response.error_message = str(e)
+            raise ConnectionFailed("Shopify API server error. Please try again later or contact Shopify support.")
+        except ResourceConnectionError as e:
+            # Handle network/connection errors
+            logger.error(f"Connection error: {str(e)}")
+            response.error_message = str(e)
+            raise ConnectionFailed("Network connection failed. Please check your internet connection and try again.")
         except Exception as e:
-
-
+            # Handle any other unexpected errors
+            logger.error(f"Unexpected error connecting to Shopify: {str(e)}")
             response.error_message = str(e)
+            raise ConnectionFailed("Failed to connect to Shopify. Please verify your shop URL and access token.")
 
         if self.yotpo_app_key is not None and self.yotpo_access_token is not None:
             url = f"https://api.yotpo.com/v1/apps/{self.yotpo_app_key}/reviews?count=1&utoken={self.yotpo_access_token}"
-            headers = {
-                "accept": "application/json",
-                "Content-Type": "application/json"
-            }
+            headers = {"accept": "application/json", "Content-Type": "application/json"}
             if requests.get(url, headers=headers).status_code == 200:
                 response.success = True
             else:
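The Shopify handler now maps pyactiveresource client errors onto specific ConnectionFailed messages keyed on the HTTP status code (401, 402, 403, 404). A rough sketch of what a failed check_connection() call looks like from the caller's side; the shop URL and access token below are placeholders.

from mindsdb.integrations.handlers.shopify_handler.shopify_handler import ShopifyHandler
from mindsdb.integrations.libs.api_handler_exceptions import ConnectionFailed

handler = ShopifyHandler(
    "my_shopify",
    connection_data={"shop_url": "example.myshopify.com", "access_token": "invalid-token"},
)

try:
    status = handler.check_connection()
except ConnectionFailed as e:
    # e.g. a 401 surfaces as "Invalid Shopify API credentials. Please check your
    # access token and shop URL." per the branches added above.
    print(e)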
mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py

@@ -204,10 +204,11 @@ class SnowflakeHandler(MetaDatabaseHandler):
             "user": self.connection_data.get("user"),
             "password": self.connection_data.get("password"),
             "database": self.connection_data.get("database"),
+            "schema": self.connection_data.get("schema", "PUBLIC"),
         }
 
         # Optional connection parameters
-        optional_params = ["
+        optional_params = ["warehouse", "role"]
         for param in optional_params:
             if param in self.connection_data:
                 config[param] = self.connection_data[param]
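The Snowflake change defaults the connection schema to PUBLIC when none is supplied and keeps warehouse and role optional. A small self-contained illustration of the fallback; all values are placeholders.

connection_data = {
    "account": "my_account",
    "user": "me",
    "password": "secret",
    "database": "MY_DB",
    # "schema" deliberately omitted
    "warehouse": "COMPUTE_WH",  # still optional
    "role": "ANALYST",          # still optional
}

# Mirrors the handler's new behaviour: a missing schema falls back to PUBLIC.
schema = connection_data.get("schema", "PUBLIC")
assert schema == "PUBLIC"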
mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py

@@ -170,9 +170,9 @@ def get_all_website_links(url, headers: dict = None) -> dict:
             href = href.rstrip("/")
             urls.add(href)
 
-    except Exception
+    except Exception:
         error_message = traceback.format_exc().splitlines()[-1]
-        logger.
+        logger.exception("An exception occurred:")
         return {
             "url": url,
             "urls": urls,
@@ -238,9 +238,9 @@ def get_all_website_links_recursively(
         if url not in reviewed_urls and matches_filter:
             try:
                 reviewed_urls[url] = get_all_website_links(url, headers=headers)
-            except Exception
+            except Exception:
                 error_message = traceback.format_exc().splitlines()[-1]
-                logger.
+                logger.exception("An exception occurred:")
                 reviewed_urls[url] = {
                     "url": url,
                     "urls": [],