MindsDB 25.9.2.0a1__py3-none-any.whl → 25.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of MindsDB might be problematic. Click here for more details.

Files changed (164)
  1. mindsdb/__about__.py +1 -1
  2. mindsdb/__main__.py +40 -29
  3. mindsdb/api/a2a/__init__.py +1 -1
  4. mindsdb/api/a2a/agent.py +16 -10
  5. mindsdb/api/a2a/common/server/server.py +7 -3
  6. mindsdb/api/a2a/common/server/task_manager.py +12 -5
  7. mindsdb/api/a2a/common/types.py +66 -0
  8. mindsdb/api/a2a/task_manager.py +65 -17
  9. mindsdb/api/common/middleware.py +10 -12
  10. mindsdb/api/executor/command_executor.py +51 -40
  11. mindsdb/api/executor/datahub/datanodes/datanode.py +2 -2
  12. mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py +7 -13
  13. mindsdb/api/executor/datahub/datanodes/integration_datanode.py +101 -49
  14. mindsdb/api/executor/datahub/datanodes/project_datanode.py +8 -4
  15. mindsdb/api/executor/datahub/datanodes/system_tables.py +3 -2
  16. mindsdb/api/executor/exceptions.py +29 -10
  17. mindsdb/api/executor/planner/plan_join.py +17 -3
  18. mindsdb/api/executor/planner/query_prepare.py +2 -20
  19. mindsdb/api/executor/sql_query/sql_query.py +74 -74
  20. mindsdb/api/executor/sql_query/steps/fetch_dataframe.py +1 -2
  21. mindsdb/api/executor/sql_query/steps/subselect_step.py +0 -1
  22. mindsdb/api/executor/utilities/functions.py +6 -6
  23. mindsdb/api/executor/utilities/sql.py +37 -20
  24. mindsdb/api/http/gui.py +5 -11
  25. mindsdb/api/http/initialize.py +75 -61
  26. mindsdb/api/http/namespaces/agents.py +10 -15
  27. mindsdb/api/http/namespaces/analysis.py +13 -20
  28. mindsdb/api/http/namespaces/auth.py +1 -1
  29. mindsdb/api/http/namespaces/chatbots.py +0 -5
  30. mindsdb/api/http/namespaces/config.py +15 -11
  31. mindsdb/api/http/namespaces/databases.py +140 -201
  32. mindsdb/api/http/namespaces/file.py +17 -4
  33. mindsdb/api/http/namespaces/handlers.py +17 -7
  34. mindsdb/api/http/namespaces/knowledge_bases.py +28 -7
  35. mindsdb/api/http/namespaces/models.py +94 -126
  36. mindsdb/api/http/namespaces/projects.py +13 -22
  37. mindsdb/api/http/namespaces/sql.py +33 -25
  38. mindsdb/api/http/namespaces/tab.py +27 -37
  39. mindsdb/api/http/namespaces/views.py +1 -1
  40. mindsdb/api/http/start.py +16 -10
  41. mindsdb/api/mcp/__init__.py +2 -1
  42. mindsdb/api/mysql/mysql_proxy/executor/mysql_executor.py +15 -20
  43. mindsdb/api/mysql/mysql_proxy/mysql_proxy.py +26 -50
  44. mindsdb/api/mysql/mysql_proxy/utilities/__init__.py +0 -1
  45. mindsdb/api/mysql/mysql_proxy/utilities/dump.py +8 -2
  46. mindsdb/integrations/handlers/byom_handler/byom_handler.py +165 -190
  47. mindsdb/integrations/handlers/databricks_handler/databricks_handler.py +98 -46
  48. mindsdb/integrations/handlers/druid_handler/druid_handler.py +32 -40
  49. mindsdb/integrations/handlers/file_handler/file_handler.py +7 -0
  50. mindsdb/integrations/handlers/gitlab_handler/gitlab_handler.py +5 -2
  51. mindsdb/integrations/handlers/lightwood_handler/functions.py +45 -79
  52. mindsdb/integrations/handlers/mssql_handler/mssql_handler.py +438 -100
  53. mindsdb/integrations/handlers/mssql_handler/requirements_odbc.txt +3 -0
  54. mindsdb/integrations/handlers/mysql_handler/mysql_handler.py +235 -3
  55. mindsdb/integrations/handlers/oracle_handler/__init__.py +2 -0
  56. mindsdb/integrations/handlers/oracle_handler/connection_args.py +7 -1
  57. mindsdb/integrations/handlers/oracle_handler/oracle_handler.py +321 -16
  58. mindsdb/integrations/handlers/oracle_handler/requirements.txt +1 -1
  59. mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +14 -2
  60. mindsdb/integrations/handlers/shopify_handler/requirements.txt +1 -0
  61. mindsdb/integrations/handlers/shopify_handler/shopify_handler.py +80 -13
  62. mindsdb/integrations/handlers/snowflake_handler/snowflake_handler.py +2 -1
  63. mindsdb/integrations/handlers/statsforecast_handler/requirements.txt +1 -0
  64. mindsdb/integrations/handlers/statsforecast_handler/requirements_extra.txt +1 -0
  65. mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py +4 -4
  66. mindsdb/integrations/handlers/zendesk_handler/zendesk_tables.py +144 -111
  67. mindsdb/integrations/libs/api_handler.py +10 -10
  68. mindsdb/integrations/libs/base.py +4 -4
  69. mindsdb/integrations/libs/llm/utils.py +2 -2
  70. mindsdb/integrations/libs/ml_handler_process/create_engine_process.py +4 -7
  71. mindsdb/integrations/libs/ml_handler_process/func_call_process.py +2 -7
  72. mindsdb/integrations/libs/ml_handler_process/learn_process.py +37 -47
  73. mindsdb/integrations/libs/ml_handler_process/update_engine_process.py +4 -7
  74. mindsdb/integrations/libs/ml_handler_process/update_process.py +2 -7
  75. mindsdb/integrations/libs/process_cache.py +132 -140
  76. mindsdb/integrations/libs/response.py +18 -12
  77. mindsdb/integrations/libs/vectordatabase_handler.py +26 -0
  78. mindsdb/integrations/utilities/files/file_reader.py +6 -7
  79. mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/__init__.py +1 -0
  80. mindsdb/integrations/utilities/handlers/auth_utilities/snowflake/snowflake_jwt_gen.py +151 -0
  81. mindsdb/integrations/utilities/rag/config_loader.py +37 -26
  82. mindsdb/integrations/utilities/rag/rerankers/base_reranker.py +83 -30
  83. mindsdb/integrations/utilities/rag/rerankers/reranker_compressor.py +4 -4
  84. mindsdb/integrations/utilities/rag/retrievers/sql_retriever.py +55 -133
  85. mindsdb/integrations/utilities/rag/settings.py +58 -133
  86. mindsdb/integrations/utilities/rag/splitters/file_splitter.py +5 -15
  87. mindsdb/interfaces/agents/agents_controller.py +2 -3
  88. mindsdb/interfaces/agents/constants.py +0 -2
  89. mindsdb/interfaces/agents/litellm_server.py +34 -58
  90. mindsdb/interfaces/agents/mcp_client_agent.py +10 -10
  91. mindsdb/interfaces/agents/mindsdb_database_agent.py +5 -5
  92. mindsdb/interfaces/agents/run_mcp_agent.py +12 -21
  93. mindsdb/interfaces/chatbot/chatbot_task.py +20 -23
  94. mindsdb/interfaces/chatbot/polling.py +30 -18
  95. mindsdb/interfaces/data_catalog/data_catalog_loader.py +16 -17
  96. mindsdb/interfaces/data_catalog/data_catalog_reader.py +15 -4
  97. mindsdb/interfaces/database/data_handlers_cache.py +190 -0
  98. mindsdb/interfaces/database/database.py +3 -3
  99. mindsdb/interfaces/database/integrations.py +7 -110
  100. mindsdb/interfaces/database/projects.py +2 -6
  101. mindsdb/interfaces/database/views.py +1 -4
  102. mindsdb/interfaces/file/file_controller.py +6 -6
  103. mindsdb/interfaces/functions/controller.py +1 -1
  104. mindsdb/interfaces/functions/to_markdown.py +2 -2
  105. mindsdb/interfaces/jobs/jobs_controller.py +5 -9
  106. mindsdb/interfaces/jobs/scheduler.py +3 -9
  107. mindsdb/interfaces/knowledge_base/controller.py +244 -128
  108. mindsdb/interfaces/knowledge_base/evaluate.py +36 -41
  109. mindsdb/interfaces/knowledge_base/executor.py +11 -0
  110. mindsdb/interfaces/knowledge_base/llm_client.py +51 -17
  111. mindsdb/interfaces/knowledge_base/preprocessing/json_chunker.py +40 -61
  112. mindsdb/interfaces/model/model_controller.py +172 -168
  113. mindsdb/interfaces/query_context/context_controller.py +14 -2
  114. mindsdb/interfaces/skills/custom/text2sql/mindsdb_sql_toolkit.py +10 -14
  115. mindsdb/interfaces/skills/retrieval_tool.py +43 -50
  116. mindsdb/interfaces/skills/skill_tool.py +2 -2
  117. mindsdb/interfaces/skills/skills_controller.py +1 -4
  118. mindsdb/interfaces/skills/sql_agent.py +25 -19
  119. mindsdb/interfaces/storage/db.py +16 -6
  120. mindsdb/interfaces/storage/fs.py +114 -169
  121. mindsdb/interfaces/storage/json.py +19 -18
  122. mindsdb/interfaces/tabs/tabs_controller.py +49 -72
  123. mindsdb/interfaces/tasks/task_monitor.py +3 -9
  124. mindsdb/interfaces/tasks/task_thread.py +7 -9
  125. mindsdb/interfaces/triggers/trigger_task.py +7 -13
  126. mindsdb/interfaces/triggers/triggers_controller.py +47 -52
  127. mindsdb/migrations/migrate.py +16 -16
  128. mindsdb/utilities/api_status.py +58 -0
  129. mindsdb/utilities/config.py +68 -2
  130. mindsdb/utilities/exception.py +40 -1
  131. mindsdb/utilities/fs.py +0 -1
  132. mindsdb/utilities/hooks/profiling.py +17 -14
  133. mindsdb/utilities/json_encoder.py +24 -10
  134. mindsdb/utilities/langfuse.py +40 -45
  135. mindsdb/utilities/log.py +272 -0
  136. mindsdb/utilities/ml_task_queue/consumer.py +52 -58
  137. mindsdb/utilities/ml_task_queue/producer.py +26 -30
  138. mindsdb/utilities/render/sqlalchemy_render.py +22 -20
  139. mindsdb/utilities/starters.py +0 -10
  140. mindsdb/utilities/utils.py +2 -2
  141. {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/METADATA +286 -267
  142. {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/RECORD +145 -159
  143. mindsdb/api/mysql/mysql_proxy/utilities/exceptions.py +0 -14
  144. mindsdb/api/postgres/__init__.py +0 -0
  145. mindsdb/api/postgres/postgres_proxy/__init__.py +0 -0
  146. mindsdb/api/postgres/postgres_proxy/executor/__init__.py +0 -1
  147. mindsdb/api/postgres/postgres_proxy/executor/executor.py +0 -189
  148. mindsdb/api/postgres/postgres_proxy/postgres_packets/__init__.py +0 -0
  149. mindsdb/api/postgres/postgres_proxy/postgres_packets/errors.py +0 -322
  150. mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_fields.py +0 -34
  151. mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message.py +0 -31
  152. mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_formats.py +0 -1265
  153. mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_message_identifiers.py +0 -31
  154. mindsdb/api/postgres/postgres_proxy/postgres_packets/postgres_packets.py +0 -253
  155. mindsdb/api/postgres/postgres_proxy/postgres_proxy.py +0 -477
  156. mindsdb/api/postgres/postgres_proxy/utilities/__init__.py +0 -10
  157. mindsdb/api/postgres/start.py +0 -11
  158. mindsdb/integrations/handlers/mssql_handler/tests/__init__.py +0 -0
  159. mindsdb/integrations/handlers/mssql_handler/tests/test_mssql_handler.py +0 -169
  160. mindsdb/integrations/handlers/oracle_handler/tests/__init__.py +0 -0
  161. mindsdb/integrations/handlers/oracle_handler/tests/test_oracle_handler.py +0 -32
  162. {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/WHEEL +0 -0
  163. {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/licenses/LICENSE +0 -0
  164. {mindsdb-25.9.2.0a1.dist-info → mindsdb-25.10.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,190 @@
1
+ import sys
2
+ import time
3
+ import threading
4
+ from dataclasses import dataclass, field
5
+ from collections import defaultdict
6
+
7
+ from mindsdb.integrations.libs.base import DatabaseHandler
8
+ from mindsdb.utilities.context import context as ctx
9
+ from mindsdb.utilities import log
10
+
11
+ logger = log.getLogger(__name__)
12
+
13
+
14
@dataclass(kw_only=True, slots=True)
class HandlersCacheRecord:
    """A single cached data-handler entry.

    Attributes:
        handler: the cached DatabaseHandler instance
        expired_at: unix timestamp (seconds) after which the record is expired
        connect_attempt_done: set once a connection attempt has finished
            (successfully or not), so readers can wait for it
    """

    handler: DatabaseHandler
    expired_at: float
    connect_attempt_done: threading.Event = field(default_factory=threading.Event)

    @property
    def expired(self) -> bool:
        """Check if the record's TTL has elapsed.

        Returns:
            bool: True if the handler is expired, False otherwise
        """
        return self.expired_at < time.time()

    @property
    def has_references(self) -> bool:
        """Check whether code outside the cache still holds the handler.

        sys.getrefcount counts one reference for the ``handler`` attribute
        itself and one for the temporary argument of the call, so a count
        above 2 means someone else is currently using the handler.

        Returns:
            bool: True if the handler has external references, False otherwise
        """
        return sys.getrefcount(self.handler) > 2

    def connect(self) -> None:
        """Attempt to open the handler's connection.

        Failures are logged, not raised. ``connect_attempt_done`` is set in a
        ``finally`` block so waiters in ``HandlersCache.get`` never block
        forever on a failed connection attempt.
        """
        try:
            if not self.handler.is_connected:
                self.handler.connect()
        except Exception:
            logger.warning(f"Error connecting to handler: {self.handler.name}", exc_info=True)
        finally:
            self.connect_attempt_done.set()
+
56
class HandlersCache:
    """Cache for data handlers that keeps connections open during ``ttl``
    seconds from the handler's last use."""

    def __init__(self, ttl: int = 60, clean_timeout: float = 3):
        """Init the cache.

        Args:
            ttl (int): time to live (in seconds) for a record in the cache
            clean_timeout (float): interval between cleanups of expired handlers
        """
        self.ttl: int = ttl
        # annotation fixed: the parameter (and default) is a float, not an int
        self._clean_timeout: float = clean_timeout
        # key is (handler name, company id, 0-or-thread-id); annotation fixed
        # to match the tuple keys actually used below
        self.handlers: dict[tuple, list[HandlersCacheRecord]] = defaultdict(list)
        self._lock = threading.RLock()
        self._stop_event = threading.Event()
        self.cleaner_thread = None

    def __del__(self):
        self._stop_clean()

    def _start_clean(self) -> None:
        """Start the worker that closes connections after their TTL expires."""
        # fast path: already running (checked outside the lock; worst case we
        # re-check after acquiring it via the Event state)
        if isinstance(self.cleaner_thread, threading.Thread) and self.cleaner_thread.is_alive():
            return
        with self._lock:
            self._stop_event.clear()
            self.cleaner_thread = threading.Thread(target=self._clean, name="HandlersCache.clean")
            self.cleaner_thread.daemon = True
            self.cleaner_thread.start()

    def _stop_clean(self) -> None:
        """Signal the clean worker to stop."""
        self._stop_event.set()

    def set(self, handler: DatabaseHandler):
        """Add (or replace) a handler in the cache.

        NOTE: If the handler is not thread-safe, then use a lock when making
        connection. Otherwise, make connection in the same thread without using
        a lock to speed up parallel queries. (They don't need to wait for a
        connection in another thread.)

        NOTE(review): ``record.connect()`` below runs after the lock is
        released for both thread-safe and non-thread-safe handlers — confirm
        this matches the intent stated above.

        Args:
            handler (DatabaseHandler)
        """
        thread_safe = getattr(handler, "thread_safe", True)
        with self._lock:
            try:
                # If the handler is defined to be thread safe, set 0 as the last
                # element of the key, otherwise set the thread ID.
                key = (
                    handler.name,
                    ctx.company_id,
                    0 if thread_safe else threading.get_native_id(),
                )
                record = HandlersCacheRecord(handler=handler, expired_at=time.time() + self.ttl)
                self.handlers[key].append(record)
            except Exception:
                logger.warning("Error setting data handler cache record:", exc_info=True)
                return
        self._start_clean()
        # connect outside the lock so parallel queries are not serialized on it
        record.connect()

    def _get_cache_records(self, name: str) -> tuple[list[HandlersCacheRecord], tuple]:
        """Get cache records by handler name.

        Tries the thread-safe key first, then falls back to the key bound to
        the current thread.

        Args:
            name (str): handler name

        Returns:
            tuple[list[HandlersCacheRecord], tuple]: cache records (empty list
            if none — annotation fixed, None is never returned) and the key of
            the handler in the cache
        """
        # If the handler is not thread safe, the thread ID will be assigned to
        # the last element of the key.
        key = (name, ctx.company_id, 0)
        if key not in self.handlers:
            key = (name, ctx.company_id, threading.get_native_id())
        return self.handlers.get(key, []), key

    def get(self, name: str) -> DatabaseHandler | None:
        """Get a handler from the cache by name.

        Refreshes the TTL of the returned record and waits (up to 10s) for its
        connection attempt to finish before handing it out.

        Args:
            name (str): handler name

        Returns:
            DatabaseHandler | None: a cached handler, or None if no live,
            unreferenced record exists
        """
        with self._lock:
            records, _ = self._get_cache_records(name)
            for record in records:
                if record.expired is False and record.has_references is False:
                    record.expired_at = time.time() + self.ttl
                    if record.connect_attempt_done.wait(timeout=10) is False:
                        logger.warning(f"Handler's connection attempt has not finished in 10s: {record.handler.name}")
                    return record.handler
            return None

    def delete(self, name: str) -> None:
        """Delete a handler from the cache, disconnecting all its records.

        Args:
            name (str): handler name
        """
        with self._lock:
            records, key = self._get_cache_records(name)
            if len(records) > 0:
                del self.handlers[key]
                for record in records:
                    try:
                        record.handler.disconnect()
                    except Exception:
                        logger.debug("Error disconnecting data handler:", exc_info=True)

            if len(self.handlers) == 0:
                self._stop_clean()

    def _clean(self) -> None:
        """Worker that deletes from the cache handlers not used for ``ttl``."""
        while self._stop_event.wait(timeout=self._clean_timeout) is False:
            with self._lock:
                for key in list(self.handlers.keys()):
                    active_handlers_list = []
                    for record in self.handlers[key]:
                        # only drop records that are both expired and no longer
                        # referenced anywhere else
                        if record.expired and record.has_references is False:
                            try:
                                record.handler.disconnect()
                            except Exception:
                                logger.debug("Error disconnecting data handler:", exc_info=True)
                        else:
                            active_handlers_list.append(record)
                    if len(active_handlers_list) > 0:
                        self.handlers[key] = active_handlers_list
                    else:
                        del self.handlers[key]

                if len(self.handlers) == 0:
                    # no records left: let the worker exit; _start_clean will
                    # respawn it on the next set()
                    self._stop_event.set()
@@ -32,10 +32,10 @@ class DatabaseController:
32
32
  Returns:
33
33
  None
34
34
  """
35
- databases = self.get_dict()
36
- if name.lower() not in databases:
35
+ databases = self.get_dict(lowercase=False)
36
+ if name not in databases:
37
37
  raise EntityNotExistsError("Database does not exists", name)
38
- db_type = databases[name.lower()]["type"]
38
+ db_type = databases[name]["type"]
39
39
  if db_type == "project":
40
40
  project = self.get_project(name, strict_case)
41
41
  project.delete()
@@ -1,5 +1,4 @@
1
1
  import os
2
- import sys
3
2
  import base64
4
3
  import shutil
5
4
  import ast
@@ -33,116 +32,11 @@ from mindsdb.integrations.libs.ml_exec_base import BaseMLEngineExec
33
32
  from mindsdb.integrations.libs.base import BaseHandler
34
33
  import mindsdb.utilities.profiler as profiler
35
34
  from mindsdb.interfaces.data_catalog.data_catalog_loader import DataCatalogLoader
35
+ from mindsdb.interfaces.database.data_handlers_cache import HandlersCache
36
36
 
37
37
  logger = log.getLogger(__name__)
38
38
 
39
39
 
40
- class HandlersCache:
41
- """Cache for data handlers that keep connections opened during ttl time from handler last use"""
42
-
43
- def __init__(self, ttl: int = 60):
44
- """init cache
45
-
46
- Args:
47
- ttl (int): time to live (in seconds) for record in cache
48
- """
49
- self.ttl = ttl
50
- self.handlers = {}
51
- self._lock = threading.RLock()
52
- self._stop_event = threading.Event()
53
- self.cleaner_thread = None
54
-
55
- def __del__(self):
56
- self._stop_clean()
57
-
58
- def _start_clean(self) -> None:
59
- """start worker that close connections after ttl expired"""
60
- if isinstance(self.cleaner_thread, threading.Thread) and self.cleaner_thread.is_alive():
61
- return
62
- self._stop_event.clear()
63
- self.cleaner_thread = threading.Thread(target=self._clean, name="HandlersCache.clean")
64
- self.cleaner_thread.daemon = True
65
- self.cleaner_thread.start()
66
-
67
- def _stop_clean(self) -> None:
68
- """stop clean worker"""
69
- self._stop_event.set()
70
-
71
- def set(self, handler: DatabaseHandler):
72
- """add (or replace) handler in cache
73
-
74
- Args:
75
- handler (DatabaseHandler)
76
- """
77
- with self._lock:
78
- try:
79
- # If the handler is defined to be thread safe, set 0 as the last element of the key, otherwise set the thrad ID.
80
- key = (
81
- handler.name,
82
- ctx.company_id,
83
- 0 if getattr(handler, "thread_safe", False) else threading.get_native_id(),
84
- )
85
- handler.connect()
86
- self.handlers[key] = {"handler": handler, "expired_at": time.time() + self.ttl}
87
- except Exception:
88
- pass
89
- self._start_clean()
90
-
91
- def get(self, name: str) -> Optional[DatabaseHandler]:
92
- """get handler from cache by name
93
-
94
- Args:
95
- name (str): handler name
96
-
97
- Returns:
98
- DatabaseHandler
99
- """
100
- with self._lock:
101
- # If the handler is not thread safe, the thread ID will be assigned to the last element of the key.
102
- key = (name, ctx.company_id, threading.get_native_id())
103
- if key not in self.handlers:
104
- # If the handler is thread safe, a 0 will be assigned to the last element of the key.
105
- key = (name, ctx.company_id, 0)
106
- if key not in self.handlers or self.handlers[key]["expired_at"] < time.time():
107
- return None
108
- self.handlers[key]["expired_at"] = time.time() + self.ttl
109
- return self.handlers[key]["handler"]
110
-
111
- def delete(self, name: str) -> None:
112
- """delete handler from cache
113
-
114
- Args:
115
- name (str): handler name
116
- """
117
- with self._lock:
118
- key = (name, ctx.company_id, threading.get_native_id())
119
- if key in self.handlers:
120
- try:
121
- self.handlers[key].disconnect()
122
- except Exception:
123
- pass
124
- del self.handlers[key]
125
- if len(self.handlers) == 0:
126
- self._stop_clean()
127
-
128
- def _clean(self) -> None:
129
- """worker that delete from cache handlers that was not in use for ttl"""
130
- while self._stop_event.wait(timeout=3) is False:
131
- with self._lock:
132
- for key in list(self.handlers.keys()):
133
- if (
134
- self.handlers[key]["expired_at"] < time.time()
135
- and sys.getrefcount(self.handlers[key]) == 2 # returned ref count is always 1 higher
136
- ):
137
- try:
138
- self.handlers[key].disconnect()
139
- except Exception:
140
- pass
141
- del self.handlers[key]
142
- if len(self.handlers) == 0:
143
- self._stop_event.set()
144
-
145
-
146
40
  class IntegrationController:
147
41
  @staticmethod
148
42
  def _is_not_empty_str(s):
@@ -172,9 +66,6 @@ class IntegrationController:
172
66
  )
173
67
  handler_meta = self.get_handler_meta(engine)
174
68
 
175
- if not name.islower():
176
- raise ValueError(f"The name must be in lower case: {name}")
177
-
178
69
  accept_connection_args = handler_meta.get("connection_args")
179
70
  logger.debug("%s: accept_connection_args - %s", self.__class__.__name__, accept_connection_args)
180
71
 
@@ -866,6 +757,8 @@ class IntegrationController:
866
757
 
867
758
  def import_handler(self, handler_name: str, base_import: str = None):
868
759
  with self._import_lock:
760
+ time_before_import = time.perf_counter()
761
+ logger.debug(f"Importing handler '{handler_name}'")
869
762
  handler_meta = self.handlers_import_status[handler_name]
870
763
  handler_dir = handler_meta["path"]
871
764
 
@@ -877,9 +770,13 @@ class IntegrationController:
877
770
  handler_module = importlib.import_module(f"{base_import}{handler_folder_name}")
878
771
  self.handler_modules[handler_name] = handler_module
879
772
  handler_meta = self._get_handler_meta(handler_name)
773
+ logger.debug(
774
+ f"Handler '{handler_name}' imported successfully in {(time.perf_counter() - time_before_import):.3f} seconds"
775
+ )
880
776
  except Exception as e:
881
777
  handler_meta["import"]["success"] = False
882
778
  handler_meta["import"]["error_message"] = str(e)
779
+ logger.debug(f"Failed to import handler '{handler_name}': {e}")
883
780
 
884
781
  self.handlers_import_status[handler_meta["name"]] = handler_meta
885
782
  return handler_meta
@@ -38,20 +38,16 @@ class Project:
38
38
  return p
39
39
 
40
40
  def create(self, name: str):
41
- name = name.lower()
42
-
43
41
  company_id = ctx.company_id if ctx.company_id is not None else 0
44
42
 
45
43
  existing_record = db.Integration.query.filter(
46
- sa.func.lower(db.Integration.name) == name, db.Integration.company_id == ctx.company_id
44
+ db.Integration.name == name, db.Integration.company_id == ctx.company_id
47
45
  ).first()
48
46
  if existing_record is not None:
49
47
  raise EntityExistsError("Database exists with this name ", name)
50
48
 
51
49
  existing_record = db.Project.query.filter(
52
- (sa.func.lower(db.Project.name) == name)
53
- & (db.Project.company_id == company_id)
54
- & (db.Project.deleted_at == sa.null())
50
+ (db.Project.name == name) & (db.Project.company_id == company_id) & (db.Project.deleted_at == sa.null())
55
51
  ).first()
56
52
  if existing_record is not None:
57
53
  raise EntityExistsError("Project already exists", name)
@@ -8,7 +8,6 @@ from mindsdb.interfaces.model.functions import get_project_record, get_project_r
8
8
 
9
9
  class ViewController:
10
10
  def add(self, name, query, project_name):
11
- name = name.lower()
12
11
  from mindsdb.interfaces.database.database import DatabaseController
13
12
 
14
13
  database_controller = DatabaseController()
@@ -20,9 +19,7 @@ class ViewController:
20
19
  project_id = project_databases_dict[project_name]["id"]
21
20
  view_record = (
22
21
  db.session.query(db.View.id)
23
- .filter(
24
- func.lower(db.View.name) == name, db.View.company_id == ctx.company_id, db.View.project_id == project_id
25
- )
22
+ .filter(db.View.name == name, db.View.company_id == ctx.company_id, db.View.project_id == project_id)
26
23
  .first()
27
24
  )
28
25
  if view_record is not None:
@@ -107,8 +107,8 @@ class FileController:
107
107
  self.fs_store.put(store_file_path, base_dir=self.dir)
108
108
  db.session.commit()
109
109
 
110
- except Exception as e:
111
- logger.error(e)
110
+ except Exception:
111
+ logger.exception("An error occurred while saving the file:")
112
112
  if file_dir is not None:
113
113
  shutil.rmtree(file_dir)
114
114
  raise
@@ -151,7 +151,7 @@ class FileController:
151
151
  def delete_file(self, name):
152
152
  file_record = db.session.query(db.File).filter_by(company_id=ctx.company_id, name=name).first()
153
153
  if file_record is None:
154
- return None
154
+ raise FileNotFoundError(f"File '{name}' does not exists")
155
155
  file_id = file_record.id
156
156
  db.session.delete(file_record)
157
157
  db.session.commit()
@@ -161,7 +161,7 @@ class FileController:
161
161
  def get_file_path(self, name):
162
162
  file_record = db.session.query(db.File).filter_by(company_id=ctx.company_id, name=name).first()
163
163
  if file_record is None:
164
- raise Exception(f"File '{name}' does not exists")
164
+ raise FileNotFoundError(f"File '{name}' does not exists")
165
165
  file_dir = f"file_{ctx.company_id}_{file_record.id}"
166
166
  self.fs_store.get(file_dir, base_dir=self.dir)
167
167
  return str(Path(self.dir).joinpath(file_dir).joinpath(Path(file_record.source_file_path).name))
@@ -176,7 +176,7 @@ class FileController:
176
176
  """
177
177
  file_record = db.session.query(db.File).filter_by(company_id=ctx.company_id, name=name).first()
178
178
  if file_record is None:
179
- raise Exception(f"File '{name}' does not exists")
179
+ raise FileNotFoundError(f"File '{name}' does not exists")
180
180
 
181
181
  file_dir = f"file_{ctx.company_id}_{file_record.id}"
182
182
  self.fs_store.get(file_dir, base_dir=self.dir)
@@ -217,7 +217,7 @@ class FileController:
217
217
 
218
218
  file_record = db.session.query(db.File).filter_by(company_id=ctx.company_id, name=name).first()
219
219
  if file_record is None:
220
- raise Exception(f"File '{name}' does not exists")
220
+ raise FileNotFoundError(f"File '{name}' does not exists")
221
221
 
222
222
  file_dir = f"file_{ctx.company_id}_{file_record.id}"
223
223
  self.fs_store.get(file_dir, base_dir=self.dir)
@@ -140,7 +140,7 @@ class FunctionController(BYOMFunctionsController):
140
140
 
141
141
  llm = create_chat_model(chat_model_params)
142
142
  except Exception as e:
143
- raise RuntimeError(f"Unable to use LLM function, check ENV variables: {e}")
143
+ raise RuntimeError(f"Unable to use LLM function, check ENV variables: {e}") from e
144
144
 
145
145
  def callback(question):
146
146
  resp = llm([HumanMessage(question)])
@@ -69,8 +69,8 @@ class ToMarkdown:
69
69
  ext = os.path.splitext(parsed_url.path)[1]
70
70
  if ext:
71
71
  return ext
72
- except requests.RequestException:
73
- raise RuntimeError(f"Unable to retrieve file extension from URL: {file_path_or_url}")
72
+ except requests.RequestException as e:
73
+ raise RuntimeError(f"Unable to retrieve file extension from URL: {file_path_or_url}") from e
74
74
  else:
75
75
  return os.path.splitext(file_path_or_url)[1]
76
76
 
@@ -128,8 +128,6 @@ class JobsController:
128
128
  at the moment supports: 'every <number> <dimension>' or 'every <dimension>'
129
129
  :return: name of created job
130
130
  """
131
- if not name.islower():
132
- raise ValueError(f"The name must be in lower case: {name}")
133
131
 
134
132
  project_controller = ProjectController()
135
133
  project = project_controller.get(name=project_name)
@@ -152,7 +150,7 @@ class JobsController:
152
150
 
153
151
  parse_sql(sql)
154
152
  except ParsingException as e:
155
- raise ParsingException(f"Unable to parse: {sql}: {e}")
153
+ raise ParsingException(f"Unable to parse: {sql}: {e}") from e
156
154
 
157
155
  if if_query is not None:
158
156
  for sql in split_sql(if_query):
@@ -162,7 +160,7 @@ class JobsController:
162
160
 
163
161
  parse_sql(sql)
164
162
  except ParsingException as e:
165
- raise ParsingException(f"Unable to parse: {sql}: {e}")
163
+ raise ParsingException(f"Unable to parse: {sql}: {e}") from e
166
164
 
167
165
  # plan next run
168
166
  next_run_at = start_at
@@ -174,8 +172,6 @@ class JobsController:
174
172
  # no schedule for job end_at is meaningless
175
173
  end_at = None
176
174
 
177
- name = name.lower()
178
-
179
175
  # create job record
180
176
  record = db.Jobs(
181
177
  company_id=ctx.company_id,
@@ -494,7 +490,7 @@ class JobsExecutor:
494
490
 
495
491
  data = ret.data
496
492
  except Exception as e:
497
- logger.error(e)
493
+ logger.exception("Error to execute job`s condition query")
498
494
  error = str(e)
499
495
  break
500
496
 
@@ -518,7 +514,7 @@ class JobsExecutor:
518
514
  error = ret.error_message
519
515
  break
520
516
  except Exception as e:
521
- logger.error(e)
517
+ logger.exception("Error to execute job`s query")
522
518
  error = str(e)
523
519
  break
524
520
 
@@ -526,7 +522,7 @@ class JobsExecutor:
526
522
  self.update_task_schedule(record)
527
523
  except Exception as e:
528
524
  db.session.rollback()
529
- logger.error(f"Error to update schedule: {e}")
525
+ logger.exception("Error to update schedule:")
530
526
  error += f"Error to update schedule: {e}"
531
527
 
532
528
  # stop scheduling
@@ -14,7 +14,6 @@ logger = log.getLogger(__name__)
14
14
 
15
15
 
16
16
  def execute_async(q_in, q_out):
17
-
18
17
  while True:
19
18
  task = q_in.get()
20
19
 
@@ -44,7 +43,7 @@ class Scheduler:
44
43
  self.q_in = queue.Queue()
45
44
  self.q_out = queue.Queue()
46
45
  self.work_thread = threading.Thread(
47
- target=execute_async, args=(self.q_in, self.q_out), name='Scheduler.execute_async'
46
+ target=execute_async, args=(self.q_in, self.q_out), name="Scheduler.execute_async"
48
47
  )
49
48
  self.work_thread.start()
50
49
 
@@ -58,14 +57,13 @@ class Scheduler:
58
57
  check_interval = self.config.get("jobs", {}).get("check_interval", 30)
59
58
 
60
59
  while True:
61
-
62
60
  logger.debug("Scheduler check timetable")
63
61
  try:
64
62
  self.check_timetable()
65
63
  except (SystemExit, KeyboardInterrupt):
66
64
  raise
67
- except Exception as e:
68
- logger.error(e)
65
+ except Exception:
66
+ logger.exception("Error in 'scheduler_monitor'")
69
67
 
70
68
  # different instances should start in not the same time
71
69
 
@@ -83,7 +81,6 @@ class Scheduler:
83
81
  db.session.remove()
84
82
 
85
83
  def execute_task(self, record_id, exec_method):
86
-
87
84
  executor = JobsExecutor()
88
85
  if exec_method == "local":
89
86
  history_id = executor.lock_record(record_id)
@@ -117,7 +114,6 @@ class Scheduler:
117
114
  raise NotImplementedError()
118
115
 
119
116
  def start(self):
120
-
121
117
  config = Config()
122
118
  db.init()
123
119
  self.config = config
@@ -127,13 +123,11 @@ class Scheduler:
127
123
  try:
128
124
  self.scheduler_monitor()
129
125
  except (KeyboardInterrupt, SystemExit):
130
-
131
126
  self.stop_thread()
132
127
  pass
133
128
 
134
129
 
135
130
  def start(verbose=False):
136
- logger.info("Jobs API is starting..")
137
131
  scheduler = Scheduler()
138
132
 
139
133
  scheduler.start()