MindsDB: mindsdb-25.3.4.0-py3-none-any.whl → mindsdb-25.3.4.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of MindsDB might be problematic.

Files changed (32)
  1. mindsdb/__about__.py +2 -2
  2. mindsdb/api/executor/datahub/datanodes/integration_datanode.py +5 -2
  3. mindsdb/api/executor/datahub/datanodes/system_tables.py +131 -138
  4. mindsdb/api/mysql/mysql_proxy/libs/constants/mysql.py +74 -0
  5. mindsdb/integrations/handlers/confluence_handler/confluence_api_client.py +176 -0
  6. mindsdb/integrations/handlers/confluence_handler/confluence_handler.py +54 -59
  7. mindsdb/integrations/handlers/confluence_handler/confluence_tables.py +753 -0
  8. mindsdb/integrations/handlers/confluence_handler/connection_args.py +8 -8
  9. mindsdb/integrations/handlers/langchain_handler/requirements.txt +1 -1
  10. mindsdb/integrations/handlers/lightwood_handler/requirements.txt +3 -3
  11. mindsdb/integrations/handlers/litellm_handler/requirements.txt +1 -1
  12. mindsdb/integrations/handlers/llama_index_handler/requirements.txt +1 -1
  13. mindsdb/integrations/handlers/ms_teams_handler/ms_graph_api_teams_client.py +278 -55
  14. mindsdb/integrations/handlers/ms_teams_handler/ms_teams_handler.py +52 -21
  15. mindsdb/integrations/handlers/ms_teams_handler/ms_teams_tables.py +6 -29
  16. mindsdb/integrations/handlers/mssql_handler/mssql_handler.py +37 -1
  17. mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +30 -1
  18. mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +53 -5
  19. mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +37 -1
  20. mindsdb/integrations/handlers/ray_serve_handler/ray_serve_handler.py +18 -16
  21. mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +68 -2
  22. mindsdb/integrations/utilities/handlers/auth_utilities/__init__.py +1 -1
  23. mindsdb/integrations/utilities/handlers/auth_utilities/microsoft/__init__.py +1 -1
  24. mindsdb/integrations/utilities/handlers/auth_utilities/microsoft/ms_graph_api_auth_utilities.py +97 -18
  25. mindsdb/utilities/render/sqlalchemy_render.py +30 -6
  26. {mindsdb-25.3.4.0.dist-info → mindsdb-25.3.4.2.dist-info}/METADATA +226 -231
  27. {mindsdb-25.3.4.0.dist-info → mindsdb-25.3.4.2.dist-info}/RECORD +30 -30
  28. {mindsdb-25.3.4.0.dist-info → mindsdb-25.3.4.2.dist-info}/WHEEL +1 -1
  29. mindsdb/integrations/handlers/confluence_handler/confluence_table.py +0 -193
  30. mindsdb/integrations/handlers/confluence_handler/requirements.txt +0 -1
  31. {mindsdb-25.3.4.0.dist-info → mindsdb-25.3.4.2.dist-info}/licenses/LICENSE +0 -0
  32. {mindsdb-25.3.4.0.dist-info → mindsdb-25.3.4.2.dist-info}/top_level.txt +0 -0

mindsdb/integrations/handlers/ms_teams_handler/ms_teams_tables.py
@@ -33,10 +33,10 @@ class TeamsTable(APIResource):
             targets (List[str]): The list of target columns to return.
         """
         client: MSGraphAPITeamsDelegatedPermissionsClient = self.handler.connect()
-        teams = client.get_all_groups()
+        teams = client.get_teams()

         teams_df = pd.json_normalize(teams, sep="_")
-        teams_df = teams_df[self.get_columns()]
+        teams_df = teams_df.reindex(columns=self.get_columns(), fill_value=None)

         return teams_df

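The reindex() change above matters when the Graph API response omits some of the expected fields: selecting columns with teams_df[self.get_columns()] raises a KeyError for any missing column, while reindex() keeps the frame and fills the gap. A minimal standalone pandas sketch (sample data only, not MindsDB code):

    import pandas as pd

    records = [{"id": "1", "displayName": "Engineering"}]   # no "description" field returned
    expected_columns = ["id", "displayName", "description"]

    df = pd.json_normalize(records, sep="_")
    # df[expected_columns] would raise KeyError because "description" is absent;
    # reindex() keeps the rows and fills the missing column instead.
    teams_df = df.reindex(columns=expected_columns, fill_value=None)
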
@@ -114,18 +114,7 @@ class ChannelsTable(APIResource):

                 condition.applied = True

-        if team_id:
-            if channel_ids:
-                channels = client.get_channels_in_group_by_ids(team_id, channel_ids)
-
-            else:
-                channels = client.get_all_channels_in_group(team_id)
-
-        elif channel_ids:
-            channels = client.get_channels_across_all_groups_by_ids(channel_ids)
-
-        else:
-            channels = client.get_all_channels_across_all_groups()
+        channels = client.get_channels(team_id, channel_ids)

         channels_df = pd.json_normalize(channels, sep="_")
         channels_df = channels_df[self.get_columns()]
@@ -218,11 +207,7 @@ class ChannelMessagesTable(APIResource):
         if not group_id or not channel_id:
             raise ValueError("The 'channelIdentity_teamId' and 'channelIdentity_channelId' columns are required.")

-        if message_ids:
-            messages = client.get_messages_in_channel_by_ids(group_id, channel_id, message_ids)
-
-        else:
-            messages = client.get_all_messages_in_channel(group_id, channel_id, limit)
+        messages = client.get_channel_messages(group_id, channel_id, message_ids)

         messages_df = pd.json_normalize(messages, sep="_")
         messages_df = messages_df[self.get_columns()]
@@ -304,11 +289,7 @@ class ChatsTable(APIResource):

                 condition.applied = True

-        if chat_ids:
-            chats = client.get_chats_by_ids(chat_ids)
-
-        else:
-            chats = client.get_all_chats(limit)
+        chats = client.get_chats(chat_ids)

         chats_df = pd.json_normalize(chats, sep="_")
         chats_df = chats_df[self.get_columns()]
@@ -387,11 +368,7 @@ class ChatMessagesTable(APIResource):
         if not chat_id:
             raise ValueError("The 'chatId' column is required.")

-        if message_ids:
-            messages = client.get_messages_in_chat_by_ids(chat_id, message_ids)
-
-        else:
-            messages = client.get_all_messages_in_chat(chat_id, limit)
+        messages = client.get_chat_messages(chat_id, message_ids)

         messages_df = pd.json_normalize(messages, sep="_")
         messages_df = messages_df[self.get_columns()]

mindsdb/integrations/handlers/mssql_handler/mssql_handler.py
@@ -13,10 +13,43 @@ from mindsdb.integrations.libs.response import (
     HandlerResponse as Response,
     RESPONSE_TYPE
 )
+from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE
+

 logger = log.getLogger(__name__)


+def _map_type(mssql_type_text: str) -> MYSQL_DATA_TYPE:
+    """ Map MSSQL text types names to MySQL types as enum.
+
+    Args:
+        mssql_type_text (str): The name of the MSSQL type to map.
+
+    Returns:
+        MYSQL_DATA_TYPE: The MySQL type enum that corresponds to the MSSQL text type name.
+    """
+    internal_type_name = mssql_type_text.lower()
+    types_map = {
+        ('tinyint', 'smallint', 'int', 'bigint'): MYSQL_DATA_TYPE.INT,
+        ('bit',): MYSQL_DATA_TYPE.BOOL,
+        ('money', 'smallmoney', 'float', 'real'): MYSQL_DATA_TYPE.FLOAT,
+        ('decimal', 'numeric'): MYSQL_DATA_TYPE.DECIMAL,
+        ('date',): MYSQL_DATA_TYPE.DATE,
+        ('time',): MYSQL_DATA_TYPE.TIME,
+        ('datetime2', 'datetimeoffset', 'datetime', 'smalldatetime'): MYSQL_DATA_TYPE.DATETIME,
+        ('varchar', 'nvarchar'): MYSQL_DATA_TYPE.VARCHAR,
+        ('char', 'text', 'nchar', 'ntext'): MYSQL_DATA_TYPE.TEXT,
+        ('binary', 'varbinary', 'image'): MYSQL_DATA_TYPE.BINARY
+    }
+
+    for db_types_list, mysql_data_type in types_map.items():
+        if internal_type_name in db_types_list:
+            return mysql_data_type
+
+    logger.warning(f"MSSQL handler type mapping: unknown type: {internal_type_name}, use VARCHAR as fallback.")
+    return MYSQL_DATA_TYPE.VARCHAR
+
+
 class SqlServerHandler(DatabaseHandler):
     """
     This handler handles connection and execution of the Microsoft SQL Server statements.
@@ -215,4 +248,7 @@ class SqlServerHandler(DatabaseHandler):
             WHERE
                 table_name = '{table_name}'
         """
-        return self.native_query(query)
+        result = self.native_query(query)
+        if result.resp_type is RESPONSE_TYPE.TABLE:
+            result.data_frame['mysql_data_type'] = result.data_frame['Type'].apply(_map_type)
+        return result
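
The _map_type helper added to the MSSQL handler above (and, below, to the Oracle, Postgres, and Snowflake handlers) follows the same tuple-keyed lookup with a VARCHAR fallback. A standalone sketch of the pattern, using a hypothetical three-member enum rather than the real MYSQL_DATA_TYPE constant:

    from enum import Enum

    class MYSQL_DATA_TYPE(Enum):      # stand-in for MindsDB's constant, trimmed to three members
        INT = 'INT'
        FLOAT = 'FLOAT'
        VARCHAR = 'VARCHAR'

    TYPES_MAP = {
        ('tinyint', 'smallint', 'int', 'bigint'): MYSQL_DATA_TYPE.INT,
        ('money', 'smallmoney', 'float', 'real'): MYSQL_DATA_TYPE.FLOAT,
    }

    def map_type(type_name: str) -> MYSQL_DATA_TYPE:
        name = type_name.lower()
        for source_types, mysql_type in TYPES_MAP.items():
            if name in source_types:              # tuple keys group the source-database types
                return mysql_type
        return MYSQL_DATA_TYPE.VARCHAR            # unknown types fall back to VARCHAR

    assert map_type('BIGINT') is MYSQL_DATA_TYPE.INT
    assert map_type('geography') is MYSQL_DATA_TYPE.VARCHAR
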

mindsdb/integrations/handlers/mysql_handler/mysql_handler.py
@@ -13,10 +13,27 @@ from mindsdb.integrations.libs.response import (
     RESPONSE_TYPE
 )
 from mindsdb.integrations.handlers.mysql_handler.settings import ConnectionConfig
+from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE

 logger = log.getLogger(__name__)


+def _map_type(mysql_type_text: str) -> MYSQL_DATA_TYPE:
+    """ Map MySQL text types names to MySQL types as enum.
+
+    Args:
+        mysql_type_text (str): The name of the MySQL type to map.
+
+    Returns:
+        MYSQL_DATA_TYPE: The MySQL type enum that corresponds to the MySQL text type name.
+    """
+    try:
+        return MYSQL_DATA_TYPE(mysql_type_text.upper())
+    except Exception:
+        logger.warning(f'MySQL handler: unknown type: {mysql_type_text}, use TEXT as fallback.')
+        return MYSQL_DATA_TYPE.TEXT
+
+
 class MySQLHandler(DatabaseHandler):
     """
     This handler handles connection and execution of the MySQL statements.
@@ -94,6 +111,8 @@ class MySQLHandler(DatabaseHandler):
             config["ssl_key"] = ssl_key
         if 'collation' not in config:
             config['collation'] = 'utf8mb4_general_ci'
+        if 'use_pure' not in config:
+            config['use_pure'] = True
         try:
             connection = mysql.connector.connect(**config)
             connection.autocommit = True
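
For context, use_pure=True tells mysql-connector-python to use its pure-Python protocol implementation instead of the C extension; the handler now sets it only when the user has not supplied a value. A minimal connection sketch with placeholder credentials (not the handler's full config handling):

    import mysql.connector

    config = {
        'host': '127.0.0.1',              # placeholder credentials
        'user': 'example_user',
        'password': 'example_password',
        'database': 'example_db',
    }
    config.setdefault('collation', 'utf8mb4_general_ci')
    config.setdefault('use_pure', True)   # prefer the pure-Python driver unless the user overrides it

    connection = mysql.connector.connect(**config)
    connection.autocommit = True
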
@@ -209,6 +228,16 @@ class MySQLHandler(DatabaseHandler):
         """
         Show details about the table
         """
-        q = f"DESCRIBE `{table_name}`;"
+        q = f"""
+            select
+                COLUMN_NAME AS FIELD, DATA_TYPE AS TYPE
+            from
+                information_schema.columns
+            where
+                table_name = '{table_name}'
+        """
         result = self.native_query(q)
+        if result.resp_type is RESPONSE_TYPE.TABLE:
+            result.data_frame = result.data_frame.rename(columns={'FIELD': 'Field', 'TYPE': 'Type'})
+            result.data_frame['mysql_data_type'] = result.data_frame['Type'].apply(_map_type)
         return result
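
Unlike the other handlers, the MySQL handler can look the type name up directly, since the DATA_TYPE values coming from information_schema largely coincide with MYSQL_DATA_TYPE member values. A standalone sketch of that lookup-with-fallback idea (hypothetical three-member enum, not the real constant):

    from enum import Enum

    class MYSQL_DATA_TYPE(Enum):          # stand-in for the MindsDB enum
        INT = 'INT'
        VARCHAR = 'VARCHAR'
        TEXT = 'TEXT'

    def map_type(type_name: str) -> MYSQL_DATA_TYPE:
        try:
            return MYSQL_DATA_TYPE(type_name.upper())   # value lookup, e.g. 'varchar' -> VARCHAR
        except ValueError:
            return MYSQL_DATA_TYPE.TEXT                 # unknown names fall back to TEXT

    assert map_type('varchar') is MYSQL_DATA_TYPE.VARCHAR
    assert map_type('geometry') is MYSQL_DATA_TYPE.TEXT
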

mindsdb/integrations/handlers/oracle_handler/oracle_handler.py
@@ -1,10 +1,9 @@
 from typing import Text, Dict, Optional

-from mindsdb_sql_parser.ast.base import ASTNode
-from mindsdb.utilities.render.sqlalchemy_render import SqlalchemyRender
 import oracledb
-from oracledb import connect, Connection, DatabaseError
 import pandas as pd
+from oracledb import connect, Connection, DatabaseError
+from mindsdb_sql_parser.ast.base import ASTNode

 from mindsdb.integrations.libs.base import DatabaseHandler
 from mindsdb.integrations.libs.response import (
@@ -13,12 +12,52 @@ from mindsdb.integrations.libs.response import (
     RESPONSE_TYPE,
 )
 from mindsdb.utilities import log
+from mindsdb.utilities.render.sqlalchemy_render import SqlalchemyRender
+from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE


 oracledb.defaults.fetch_lobs = False  # Return LOBs directly as strings or bytes.
 logger = log.getLogger(__name__)


+def _map_type(internal_type_name: str) -> MYSQL_DATA_TYPE:
+    """ Map Oracle types to MySQL types.
+    List of types: https://docs.oracle.com/en/database/oracle/oracle-database/19/sqlrf/Data-Types.html
+
+    Args:
+        internal_type_name (str): The name of the Oracle type to map.
+
+    Returns:
+        MYSQL_DATA_TYPE: The MySQL type that corresponds to the Oracle type.
+    """
+    internal_type_name = internal_type_name.upper()
+    types_map = {
+        (
+            'VARCHAR2', 'NVARCHAR2', 'CHARACTER VARYING', 'CHAR VARYING', 'NATIONAL CHARACTER', 'NATIONAL CHAR',
+            'VARCHAR', 'NCHAR', 'NATIONAL CHARACTER VARYING', 'NATIONAL CHAR VARYING', 'NCHAR VARYING', 'LONG VARCHAR'
+        ): MYSQL_DATA_TYPE.VARCHAR,
+        ('INTEGER', 'INT'): MYSQL_DATA_TYPE.INT,
+        ('SMALLINT',): MYSQL_DATA_TYPE.SMALLINT,
+        ('NUMBER', 'DECIMAL'): MYSQL_DATA_TYPE.DECIMAL,
+        ('FLOAT', 'BINARY_FLOAT', 'REAL'): MYSQL_DATA_TYPE.FLOAT,
+        ('BINARY_DOUBLE',): MYSQL_DATA_TYPE.DOUBLE,
+        ('LONG',): MYSQL_DATA_TYPE.BIGINT,
+        ('DATE',): MYSQL_DATA_TYPE.DATE,
+        ('HOUR', 'MINUTE', 'SECOND', 'TIMEZONE_HOUR', 'TIMEZONE_MINUTE'): MYSQL_DATA_TYPE.SMALLINT,
+        ('TIMESTAMP', 'TIMESTAMP WITH TIME ZONE', 'TIMESTAMP WITH LOCAL TIME ZONE'): MYSQL_DATA_TYPE.TIMESTAMP,
+        ('RAW', 'LONG RAW', 'BLOB', 'BFILE'): MYSQL_DATA_TYPE.BINARY,
+        ('ROWID', 'UROWID'): MYSQL_DATA_TYPE.TEXT,
+        ('CHAR', 'NCHAR', 'CLOB', 'NCLOB', 'CHARACTER'): MYSQL_DATA_TYPE.CHAR,
+    }
+
+    for db_types_list, mysql_data_type in types_map.items():
+        if internal_type_name in db_types_list:
+            return mysql_data_type
+
+    logger.warning(f"Oracle handler type mapping: unknown type: {internal_type_name}, use VARCHAR as fallback.")
+    return MYSQL_DATA_TYPE.VARCHAR
+
+
 class OracleHandler(DatabaseHandler):
     """
     This handler handles connection and execution of SQL queries on Oracle.
@@ -94,6 +133,12 @@ class OracleHandler(DatabaseHandler):
             connection = connect(
                 **config,
             )
+
+            if 'session_variables' in self.connection_data:
+                with connection.cursor() as cur:
+                    for key, value in self.connection_data['session_variables'].items():
+                        cur.execute(f"ALTER SESSION SET {key} = {repr(value)}")
+
         except DatabaseError as database_error:
             logger.error(f'Error connecting to Oracle, {database_error}!')
             raise
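
The new session_variables connection argument applies ALTER SESSION settings right after connecting. A minimal python-oracledb sketch with placeholder credentials and an assumed NLS_DATE_FORMAT setting (not the handler's full connection logic):

    import oracledb

    connection = oracledb.connect(
        user='example_user', password='example_password',   # placeholders
        dsn='localhost/XEPDB1',
    )

    session_variables = {'NLS_DATE_FORMAT': 'YYYY-MM-DD'}    # assumed example setting
    with connection.cursor() as cur:
        for key, value in session_variables.items():
            # repr() wraps string values in single quotes, e.g. ... SET NLS_DATE_FORMAT = 'YYYY-MM-DD'
            cur.execute(f"ALTER SESSION SET {key} = {repr(value)}")
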
@@ -237,10 +282,13 @@ class OracleHandler(DatabaseHandler):
         """
         query = f"""
             SELECT
-                column_name,
-                data_type
+                column_name AS field,
+                data_type AS type
             FROM USER_TAB_COLUMNS
             WHERE table_name = '{table_name}'
         """
         result = self.native_query(query)
+        if result.resp_type is RESPONSE_TYPE.TABLE:
+            result.data_frame.columns = [name.lower() for name in result.data_frame.columns]
+            result.data_frame['mysql_data_type'] = result.data_frame['type'].apply(_map_type)
         return result

mindsdb/integrations/handlers/postgres_handler/postgres_handler.py
@@ -20,12 +20,44 @@ from mindsdb.integrations.libs.response import (
     RESPONSE_TYPE
 )
 import mindsdb.utilities.profiler as profiler
+from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE

 logger = log.getLogger(__name__)

 SUBSCRIBE_SLEEP_INTERVAL = 1


+def _map_type(internal_type_name: str) -> MYSQL_DATA_TYPE:
+    """Map Postgres types to MySQL types.
+
+    Args:
+        internal_type_name (str): The name of the Postgres type to map.
+
+    Returns:
+        MYSQL_DATA_TYPE: The MySQL type that corresponds to the Postgres type.
+    """
+    internal_type_name = internal_type_name.lower()
+    types_map = {
+        ('smallint', 'integer', 'bigint', 'int', 'smallserial', 'serial', 'bigserial'): MYSQL_DATA_TYPE.INT,
+        ('real', 'money', 'float'): MYSQL_DATA_TYPE.FLOAT,
+        ('numeric', 'decimal'): MYSQL_DATA_TYPE.DECIMAL,
+        ('double precision',): MYSQL_DATA_TYPE.DOUBLE,
+        ('character varying', 'varchar', 'character', 'char', 'bpchar', 'bpchar', 'text'): MYSQL_DATA_TYPE.TEXT,
+        ('timestamp', 'timestamp without time zone', 'timestamp with time zone'): MYSQL_DATA_TYPE.DATETIME,
+        ('date', ): MYSQL_DATA_TYPE.DATE,
+        ('time', 'time without time zone', 'time with time zone'): MYSQL_DATA_TYPE.TIME,
+        ('boolean',): MYSQL_DATA_TYPE.BOOL,
+        ('bytea',): MYSQL_DATA_TYPE.BINARY,
+    }
+
+    for db_types_list, mysql_data_type in types_map.items():
+        if internal_type_name in db_types_list:
+            return mysql_data_type
+
+    logger.warning(f"Postgres handler type mapping: unknown type: {internal_type_name}, use VARCHAR as fallback.")
+    return MYSQL_DATA_TYPE.VARCHAR
+
+
 class PostgresHandler(DatabaseHandler):
     """
     This handler handles connection and execution of the PostgreSQL statements.
@@ -314,7 +346,11 @@ class PostgresHandler(DatabaseHandler):
             AND
                 table_schema = {schema_name}
         """
-        return self.native_query(query)
+        result = self.native_query(query)
+        if result.resp_type is RESPONSE_TYPE.TABLE:
+            result.data_frame.columns = [name.lower() for name in result.data_frame.columns]
+            result.data_frame['mysql_data_type'] = result.data_frame['type'].apply(_map_type)
+        return result

     def subscribe(self, stop_event, callback, table_name, columns=None, **kwargs):
         config = self._make_connection_args()

mindsdb/integrations/handlers/ray_serve_handler/ray_serve_handler.py
@@ -81,27 +81,29 @@ class RayServeHandler(BaseMLEngine):
         resp = requests.post(args['predict_url'],
                              json={'df': df.to_json(orient='records'), 'pred_args': pred_args},
                              headers={'content-type': 'application/json; format=pandas-records'})
-        try:
-            if args.get('is_parquet', False):
+        content_type = resp.headers.get("Content-Type", "")
+        if "application/octet-stream" in content_type:
+            try:
                 buffer = io.BytesIO(resp.content)
                 table = pq.read_table(buffer)
                 response = table.to_pandas()
-            else:
+            except Exception:
+                error = 'Could not decode parquet.'
+        else:
+            try:
                 response = resp.json()
-        except json.JSONDecodeError:
-            error = resp.text
-        except Exception:
-            error = 'Could not decode parquet.'
+            except json.JSONDecodeError:
+                error = resp.text
+
+        if 'prediction' in response:
+            target = args['target']
+            if target != 'prediction':
+                # rename prediction to target
+                response[target] = response.pop('prediction')
+            return pd.DataFrame(response)
         else:
-            if 'prediction' in response:
-                target = args['target']
-                if target != 'prediction':
-                    # rename prediction to target
-                    response[target] = response.pop('prediction')
-                return pd.DataFrame(response)
-            else:
-                # something wrong
-                error = response
+            # something wrong
+            error = response

         raise RayServeException(f"Error: {error}")

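The rewritten block above picks the decoder from the response's Content-Type header instead of a user-supplied is_parquet flag. A condensed sketch of that branching (placeholder endpoint, not the full handler logic):

    import io
    import json

    import pyarrow.parquet as pq
    import requests

    resp = requests.post('http://localhost:8000/predict', json={'df': '[]'})   # placeholder endpoint

    if 'application/octet-stream' in resp.headers.get('Content-Type', ''):
        response = pq.read_table(io.BytesIO(resp.content)).to_pandas()   # Arrow/Parquet payload
    else:
        try:
            response = resp.json()                                       # plain JSON payload
        except json.JSONDecodeError:
            raise RuntimeError(f"Error: {resp.text}")
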

mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py
@@ -7,6 +7,8 @@ from snowflake.connector.errors import NotSupportedError

 from mindsdb.utilities import log
 from mindsdb_sql_parser.ast.base import ASTNode
+from mindsdb_sql_parser.ast import Select, Identifier
+
 from mindsdb.integrations.libs.base import DatabaseHandler
 from mindsdb.utilities.render.sqlalchemy_render import SqlalchemyRender
 from mindsdb.integrations.libs.response import (
@@ -14,6 +16,7 @@ from mindsdb.integrations.libs.response import (
     HandlerResponse as Response,
     RESPONSE_TYPE
 )
+from mindsdb.api.mysql.mysql_proxy.libs.constants.mysql import MYSQL_DATA_TYPE

 try:
     import pyarrow as pa
@@ -25,6 +28,43 @@ except Exception:
 logger = log.getLogger(__name__)


+def _map_type(internal_type_name: str) -> MYSQL_DATA_TYPE:
+    """ Map Snowflake types to MySQL types.
+
+    Args:
+        internal_type_name (str): The name of the Snowflake type to map.
+
+    Returns:
+        MYSQL_DATA_TYPE: The MySQL type that corresponds to the Snowflake type.
+    """
+    internal_type_name = internal_type_name.upper()
+    types_map = {
+        ('NUMBER', 'DECIMAL', 'DEC', 'NUMERIC'): MYSQL_DATA_TYPE.DECIMAL,
+        ('INT , INTEGER , BIGINT , SMALLINT , TINYINT , BYTEINT'): MYSQL_DATA_TYPE.INT,
+        ('FLOAT', 'FLOAT4', 'FLOAT8'): MYSQL_DATA_TYPE.FLOAT,
+        ('DOUBLE', 'DOUBLE PRECISION', 'REAL'): MYSQL_DATA_TYPE.DOUBLE,
+        ('VARCHAR'): MYSQL_DATA_TYPE.VARCHAR,
+        ('CHAR', 'CHARACTER', 'NCHAR'): MYSQL_DATA_TYPE.CHAR,
+        ('STRING', 'TEXT', 'NVARCHAR'): MYSQL_DATA_TYPE.TEXT,
+        ('NVARCHAR2', 'CHAR VARYING', 'NCHAR VARYING'): MYSQL_DATA_TYPE.VARCHAR,
+        ('BINARY', 'VARBINARY'): MYSQL_DATA_TYPE.BINARY,
+        ('BOOLEAN',): MYSQL_DATA_TYPE.BOOL,
+        ('TIMESTAMP_NTZ', 'DATETIME'): MYSQL_DATA_TYPE.DATETIME,
+        ('DATE',): MYSQL_DATA_TYPE.DATE,
+        ('TIME',): MYSQL_DATA_TYPE.TIME,
+        ('TIMESTAMP_LTZ'): MYSQL_DATA_TYPE.DATETIME,
+        ('TIMESTAMP_TZ'): MYSQL_DATA_TYPE.DATETIME,
+        ('VARIANT', 'OBJECT', 'ARRAY', 'MAP', 'GEOGRAPHY', 'GEOMETRY', 'VECTOR'): MYSQL_DATA_TYPE.VARCHAR
+    }
+
+    for db_types_list, mysql_data_type in types_map.items():
+        if internal_type_name in db_types_list:
+            return mysql_data_type
+
+    logger.warning(f"Snowflake handler type mapping: unknown type: {internal_type_name}, use VARCHAR as fallback.")
+    return MYSQL_DATA_TYPE.VARCHAR
+
+
 class SnowflakeHandler(DatabaseHandler):
     """
     This handler handles connection and execution of the Snowflake statements.
@@ -234,7 +274,30 @@ class SnowflakeHandler(DatabaseHandler):

         query_str = self.renderer.get_string(query, with_failback=True)
         logger.debug(f"Executing SQL query: {query_str}")
-        return self.native_query(query_str)
+        result = self.native_query(query_str)
+        return self.lowercase_columns(result, query)
+
+    def lowercase_columns(self, result, query):
+        if not isinstance(query, Select) or result.data_frame is None:
+            return result
+
+        quoted_columns = []
+        if query.targets is not None:
+            for column in query.targets:
+                if hasattr(column, 'alias') and column.alias is not None:
+                    if column.alias.is_quoted[-1]:
+                        quoted_columns.append(column.alias.parts[-1])
+                elif isinstance(column, Identifier):
+                    if column.is_quoted[-1]:
+                        quoted_columns.append(column.parts[-1])
+
+        rename_columns = {}
+        for col in result.data_frame.columns:
+            if col.isupper() and col not in quoted_columns:
+                rename_columns[col] = col.lower()
+        if rename_columns:
+            result.data_frame = result.data_frame.rename(columns=rename_columns)
+        return result

     def get_tables(self) -> Response:
         """
@@ -261,6 +324,7 @@ class SnowflakeHandler(DatabaseHandler):

         Returns:
             Response: A response object containing the column details, formatted as per the `Response` class.
+
         Raises:
             ValueError: If the 'table_name' is not a valid string.
         """
@@ -275,6 +339,8 @@ class SnowflakeHandler(DatabaseHandler):
               AND TABLE_SCHEMA = current_schema()
         """
         result = self.native_query(query)
-        result.data_frame = result.data_frame.rename(columns={'FIELD': 'Field', 'TYPE': 'Type'})
+        if result.resp_type is RESPONSE_TYPE.TABLE:
+            result.data_frame = result.data_frame.rename(columns={'FIELD': 'Field', 'TYPE': 'Type'})
+            result.data_frame['mysql_data_type'] = result.data_frame['Type'].apply(_map_type)

         return result

mindsdb/integrations/utilities/handlers/auth_utilities/__init__.py
@@ -1,2 +1,2 @@
 from .google import GoogleUserOAuth2Manager, GoogleServiceAccountOAuth2Manager
-from .microsoft import MSGraphAPIDelegatedPermissionsManager
+from .microsoft import MSGraphAPIApplicationPermissionsManager, MSGraphAPIDelegatedPermissionsManager

mindsdb/integrations/utilities/handlers/auth_utilities/microsoft/__init__.py
@@ -1 +1 @@
-from .ms_graph_api_auth_utilities import MSGraphAPIDelegatedPermissionsManager
+from .ms_graph_api_auth_utilities import MSGraphAPIApplicationPermissionsManager, MSGraphAPIDelegatedPermissionsManager

mindsdb/integrations/utilities/handlers/auth_utilities/microsoft/ms_graph_api_auth_utilities.py
@@ -1,3 +1,4 @@
+from abc import ABC, abstractmethod
 from typing import Dict, List, Text

 from flask import request
@@ -6,12 +7,13 @@ import msal
 from mindsdb.integrations.utilities.handlers.auth_utilities.exceptions import AuthException
 from mindsdb.utilities import log

+
 logger = log.getLogger(__name__)


-class MSGraphAPIDelegatedPermissionsManager:
+class MSGraphAPIPermissionsManager(ABC):
     """
-    The class for managing the delegated permissions for the Microsoft Graph API.
+    The base class for managing the delegated permissions for the Microsoft Graph API.
     """
     def __init__(
         self,
@@ -20,10 +22,9 @@ class MSGraphAPIDelegatedPermissionsManager:
         tenant_id: Text,
         cache: msal.SerializableTokenCache,
         scopes: List = ["https://graph.microsoft.com/.default"],
-        code: Text = None,
     ) -> None:
         """
-        Initializes the delegated permissions manager.
+        Initializes the permissions manager.

         Args:
             client_id (Text): The client ID of the application registered in Microsoft Entra ID.
@@ -38,8 +39,68 @@ class MSGraphAPIDelegatedPermissionsManager:
         self.tenant_id = tenant_id
         self.cache = cache
         self.scopes = scopes
+
+    @abstractmethod
+    def get_access_token(self) -> Text:
+        """
+        Retrieves an access token for the Microsoft Graph API.
+
+        Returns:
+            Text: The access token for the Microsoft Graph API.
+        """
+        pass
+
+    def _get_msal_app(self) -> msal.ConfidentialClientApplication:
+        """
+        Returns an instance of the MSAL ConfidentialClientApplication.
+
+        Returns:
+            msal.ConfidentialClientApplication: An instance of the MSAL ConfidentialClientApplication.
+        """
+        return msal.ConfidentialClientApplication(
+            self.client_id,
+            authority=f"https://login.microsoftonline.com/{self.tenant_id}",
+            client_credential=self.client_secret,
+            token_cache=self.cache,
+        )
+
+
+class MSGraphAPIDelegatedPermissionsManager(MSGraphAPIPermissionsManager):
+    """
+    The class for managing the delegated permissions for the Microsoft Graph API.
+    """
+    def __init__(
+        self,
+        client_id: Text,
+        client_secret: Text,
+        tenant_id: Text,
+        cache: msal.SerializableTokenCache,
+        scopes: List = ["https://graph.microsoft.com/.default"],
+        code: Text = None,
+    ) -> None:
+        """
+        Initializes the delegated permissions manager.
+
+        Args:
+            client_id (Text): The client ID of the application registered in Microsoft Entra ID.
+            client_secret (Text): The client secret of the application registered in Microsoft Entra ID.
+            tenant_id (Text): The tenant ID of the application registered in Microsoft Entra ID.
+            cache (msal.SerializableTokenCache): The token cache for storing the access token.
+            scopes (List): The scopes for the Microsoft Graph API.
+            code (Text): The authentication code for acquiring the access token.
+        """
+        super().__init__(client_id, client_secret, tenant_id, cache, scopes)
         self.code = code
+        self.redirect_uri = None
+        self._set_redirect_uri()
+
+    def _set_redirect_uri(self) -> None:
+        """
+        Sets the redirect URI based on the request origin.

+        Raises:
+            AuthException: If the request origin could not be determined.
+        """
         # Set the redirect URI based on the request origin.
         # If the request origin is 127.0.0.1 (localhost), replace it with localhost.
         # This is done because the only HTTP origin allowed in Microsoft Entra ID app registration is localhost.
@@ -85,20 +146,6 @@ class MSGraphAPIDelegatedPermissionsManager:
                 auth_url=response.get('auth_url')
             )

-    def _get_msal_app(self) -> msal.ConfidentialClientApplication:
-        """
-        Returns an instance of the MSAL ConfidentialClientApplication.
-
-        Returns:
-            msal.ConfidentialClientApplication: An instance of the MSAL ConfidentialClientApplication.
-        """
-        return msal.ConfidentialClientApplication(
-            self.client_id,
-            authority=f"https://login.microsoftonline.com/{self.tenant_id}",
-            client_credential=self.client_secret,
-            token_cache=self.cache,
-        )
-
     def _execute_ms_graph_api_auth_flow(self) -> Dict:
         """
         Executes the authentication flow for the Microsoft Graph API.
@@ -131,3 +178,35 @@ class MSGraphAPIDelegatedPermissionsManager:
                 )

         raise AuthException(f'Authorisation required. Please follow the url: {auth_url}', auth_url=auth_url)
+
+
+class MSGraphAPIApplicationPermissionsManager(MSGraphAPIPermissionsManager):
+    """
+    The class for managing application permissions for the Microsoft Graph API.
+    """
+
+    def get_access_token(self) -> Text:
+        """
+        Retrieves an access token for the Microsoft Graph API using the client credentials flow.
+
+        Returns:
+            Text: The access token for the Microsoft Graph API.
+        """
+        msal_app = self._get_msal_app()
+
+        # Check if a valid access token is already in the cache.
+        accounts = msal_app.get_accounts()
+        if accounts:
+            response = msal_app.acquire_token_silent(self.scopes, account=accounts[0])
+            if "access_token" in response:
+                return response["access_token"]
+
+        # If no valid access token is found in the cache, acquire a new token using client credentials.
+        response = msal_app.acquire_token_for_client(scopes=self.scopes)
+
+        if "access_token" in response:
+            return response["access_token"]
+        else:
+            raise AuthException(
+                f"Error getting access token: {response.get('error_description')}"
+            )