mito-ai 0.1.50__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mito_ai/__init__.py +114 -0
- mito_ai/_version.py +4 -0
- mito_ai/anthropic_client.py +334 -0
- mito_ai/app_deploy/__init__.py +6 -0
- mito_ai/app_deploy/app_deploy_utils.py +44 -0
- mito_ai/app_deploy/handlers.py +345 -0
- mito_ai/app_deploy/models.py +98 -0
- mito_ai/app_manager/__init__.py +4 -0
- mito_ai/app_manager/handlers.py +167 -0
- mito_ai/app_manager/models.py +71 -0
- mito_ai/app_manager/utils.py +24 -0
- mito_ai/auth/README.md +18 -0
- mito_ai/auth/__init__.py +6 -0
- mito_ai/auth/handlers.py +96 -0
- mito_ai/auth/urls.py +13 -0
- mito_ai/chat_history/handlers.py +63 -0
- mito_ai/chat_history/urls.py +32 -0
- mito_ai/completions/completion_handlers/__init__.py +3 -0
- mito_ai/completions/completion_handlers/agent_auto_error_fixup_handler.py +59 -0
- mito_ai/completions/completion_handlers/agent_execution_handler.py +66 -0
- mito_ai/completions/completion_handlers/chat_completion_handler.py +141 -0
- mito_ai/completions/completion_handlers/code_explain_handler.py +113 -0
- mito_ai/completions/completion_handlers/completion_handler.py +42 -0
- mito_ai/completions/completion_handlers/inline_completer_handler.py +48 -0
- mito_ai/completions/completion_handlers/smart_debug_handler.py +160 -0
- mito_ai/completions/completion_handlers/utils.py +147 -0
- mito_ai/completions/handlers.py +415 -0
- mito_ai/completions/message_history.py +401 -0
- mito_ai/completions/models.py +404 -0
- mito_ai/completions/prompt_builders/__init__.py +3 -0
- mito_ai/completions/prompt_builders/agent_execution_prompt.py +57 -0
- mito_ai/completions/prompt_builders/agent_smart_debug_prompt.py +160 -0
- mito_ai/completions/prompt_builders/agent_system_message.py +472 -0
- mito_ai/completions/prompt_builders/chat_name_prompt.py +15 -0
- mito_ai/completions/prompt_builders/chat_prompt.py +116 -0
- mito_ai/completions/prompt_builders/chat_system_message.py +92 -0
- mito_ai/completions/prompt_builders/explain_code_prompt.py +32 -0
- mito_ai/completions/prompt_builders/inline_completer_prompt.py +197 -0
- mito_ai/completions/prompt_builders/prompt_constants.py +170 -0
- mito_ai/completions/prompt_builders/smart_debug_prompt.py +199 -0
- mito_ai/completions/prompt_builders/utils.py +84 -0
- mito_ai/completions/providers.py +284 -0
- mito_ai/constants.py +63 -0
- mito_ai/db/__init__.py +3 -0
- mito_ai/db/crawlers/__init__.py +6 -0
- mito_ai/db/crawlers/base_crawler.py +61 -0
- mito_ai/db/crawlers/constants.py +43 -0
- mito_ai/db/crawlers/snowflake.py +71 -0
- mito_ai/db/handlers.py +168 -0
- mito_ai/db/models.py +31 -0
- mito_ai/db/urls.py +34 -0
- mito_ai/db/utils.py +185 -0
- mito_ai/docker/mssql/compose.yml +37 -0
- mito_ai/docker/mssql/init/setup.sql +21 -0
- mito_ai/docker/mysql/compose.yml +18 -0
- mito_ai/docker/mysql/init/setup.sql +13 -0
- mito_ai/docker/oracle/compose.yml +17 -0
- mito_ai/docker/oracle/init/setup.sql +20 -0
- mito_ai/docker/postgres/compose.yml +17 -0
- mito_ai/docker/postgres/init/setup.sql +13 -0
- mito_ai/enterprise/__init__.py +3 -0
- mito_ai/enterprise/utils.py +15 -0
- mito_ai/file_uploads/__init__.py +3 -0
- mito_ai/file_uploads/handlers.py +248 -0
- mito_ai/file_uploads/urls.py +21 -0
- mito_ai/gemini_client.py +232 -0
- mito_ai/log/handlers.py +38 -0
- mito_ai/log/urls.py +21 -0
- mito_ai/logger.py +37 -0
- mito_ai/openai_client.py +382 -0
- mito_ai/path_utils.py +70 -0
- mito_ai/rules/handlers.py +44 -0
- mito_ai/rules/urls.py +22 -0
- mito_ai/rules/utils.py +56 -0
- mito_ai/settings/handlers.py +41 -0
- mito_ai/settings/urls.py +20 -0
- mito_ai/settings/utils.py +42 -0
- mito_ai/streamlit_conversion/agent_utils.py +37 -0
- mito_ai/streamlit_conversion/prompts/prompt_constants.py +172 -0
- mito_ai/streamlit_conversion/prompts/prompt_utils.py +10 -0
- mito_ai/streamlit_conversion/prompts/streamlit_app_creation_prompt.py +46 -0
- mito_ai/streamlit_conversion/prompts/streamlit_error_correction_prompt.py +28 -0
- mito_ai/streamlit_conversion/prompts/streamlit_finish_todo_prompt.py +45 -0
- mito_ai/streamlit_conversion/prompts/streamlit_system_prompt.py +56 -0
- mito_ai/streamlit_conversion/prompts/update_existing_app_prompt.py +50 -0
- mito_ai/streamlit_conversion/search_replace_utils.py +94 -0
- mito_ai/streamlit_conversion/streamlit_agent_handler.py +144 -0
- mito_ai/streamlit_conversion/streamlit_utils.py +85 -0
- mito_ai/streamlit_conversion/validate_streamlit_app.py +105 -0
- mito_ai/streamlit_preview/__init__.py +6 -0
- mito_ai/streamlit_preview/handlers.py +111 -0
- mito_ai/streamlit_preview/manager.py +152 -0
- mito_ai/streamlit_preview/urls.py +22 -0
- mito_ai/streamlit_preview/utils.py +29 -0
- mito_ai/tests/__init__.py +3 -0
- mito_ai/tests/chat_history/test_chat_history.py +211 -0
- mito_ai/tests/completions/completion_handlers_utils_test.py +190 -0
- mito_ai/tests/conftest.py +53 -0
- mito_ai/tests/create_agent_system_message_prompt_test.py +22 -0
- mito_ai/tests/data/prompt_lg.py +69 -0
- mito_ai/tests/data/prompt_sm.py +6 -0
- mito_ai/tests/data/prompt_xl.py +13 -0
- mito_ai/tests/data/stock_data.sqlite3 +0 -0
- mito_ai/tests/db/conftest.py +39 -0
- mito_ai/tests/db/connections_test.py +102 -0
- mito_ai/tests/db/mssql_test.py +29 -0
- mito_ai/tests/db/mysql_test.py +29 -0
- mito_ai/tests/db/oracle_test.py +29 -0
- mito_ai/tests/db/postgres_test.py +29 -0
- mito_ai/tests/db/schema_test.py +93 -0
- mito_ai/tests/db/sqlite_test.py +31 -0
- mito_ai/tests/db/test_db_constants.py +61 -0
- mito_ai/tests/deploy_app/test_app_deploy_utils.py +89 -0
- mito_ai/tests/file_uploads/__init__.py +2 -0
- mito_ai/tests/file_uploads/test_handlers.py +282 -0
- mito_ai/tests/message_history/test_generate_short_chat_name.py +120 -0
- mito_ai/tests/message_history/test_message_history_utils.py +469 -0
- mito_ai/tests/open_ai_utils_test.py +152 -0
- mito_ai/tests/performance_test.py +329 -0
- mito_ai/tests/providers/test_anthropic_client.py +447 -0
- mito_ai/tests/providers/test_azure.py +631 -0
- mito_ai/tests/providers/test_capabilities.py +120 -0
- mito_ai/tests/providers/test_gemini_client.py +195 -0
- mito_ai/tests/providers/test_mito_server_utils.py +448 -0
- mito_ai/tests/providers/test_model_resolution.py +130 -0
- mito_ai/tests/providers/test_openai_client.py +57 -0
- mito_ai/tests/providers/test_provider_completion_exception.py +66 -0
- mito_ai/tests/providers/test_provider_limits.py +42 -0
- mito_ai/tests/providers/test_providers.py +382 -0
- mito_ai/tests/providers/test_retry_logic.py +389 -0
- mito_ai/tests/providers/test_stream_mito_server_utils.py +140 -0
- mito_ai/tests/providers/utils.py +85 -0
- mito_ai/tests/rules/conftest.py +26 -0
- mito_ai/tests/rules/rules_test.py +117 -0
- mito_ai/tests/server_limits_test.py +406 -0
- mito_ai/tests/settings/conftest.py +26 -0
- mito_ai/tests/settings/settings_test.py +70 -0
- mito_ai/tests/settings/test_settings_constants.py +9 -0
- mito_ai/tests/streamlit_conversion/__init__.py +3 -0
- mito_ai/tests/streamlit_conversion/test_apply_search_replace.py +240 -0
- mito_ai/tests/streamlit_conversion/test_streamlit_agent_handler.py +246 -0
- mito_ai/tests/streamlit_conversion/test_streamlit_utils.py +193 -0
- mito_ai/tests/streamlit_conversion/test_validate_streamlit_app.py +112 -0
- mito_ai/tests/streamlit_preview/test_streamlit_preview_handler.py +118 -0
- mito_ai/tests/streamlit_preview/test_streamlit_preview_manager.py +292 -0
- mito_ai/tests/test_constants.py +47 -0
- mito_ai/tests/test_telemetry.py +12 -0
- mito_ai/tests/user/__init__.py +2 -0
- mito_ai/tests/user/test_user.py +120 -0
- mito_ai/tests/utils/__init__.py +3 -0
- mito_ai/tests/utils/test_anthropic_utils.py +162 -0
- mito_ai/tests/utils/test_gemini_utils.py +98 -0
- mito_ai/tests/version_check_test.py +169 -0
- mito_ai/user/handlers.py +45 -0
- mito_ai/user/urls.py +21 -0
- mito_ai/utils/__init__.py +3 -0
- mito_ai/utils/anthropic_utils.py +168 -0
- mito_ai/utils/create.py +94 -0
- mito_ai/utils/db.py +74 -0
- mito_ai/utils/error_classes.py +42 -0
- mito_ai/utils/gemini_utils.py +133 -0
- mito_ai/utils/message_history_utils.py +87 -0
- mito_ai/utils/mito_server_utils.py +242 -0
- mito_ai/utils/open_ai_utils.py +200 -0
- mito_ai/utils/provider_utils.py +49 -0
- mito_ai/utils/schema.py +86 -0
- mito_ai/utils/server_limits.py +152 -0
- mito_ai/utils/telemetry_utils.py +480 -0
- mito_ai/utils/utils.py +89 -0
- mito_ai/utils/version_utils.py +94 -0
- mito_ai/utils/websocket_base.py +88 -0
- mito_ai/version_check.py +60 -0
- mito_ai-0.1.50.data/data/etc/jupyter/jupyter_server_config.d/mito_ai.json +7 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/build_log.json +728 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/package.json +243 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/schemas/mito_ai/package.json.orig +238 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/schemas/mito_ai/toolbar-buttons.json +37 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.8f1845da6bf2b128c049.js +21602 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.8f1845da6bf2b128c049.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/node_modules_process_browser_js.4b128e94d31a81ebd209.js +198 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/node_modules_process_browser_js.4b128e94d31a81ebd209.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.78d3ccb73e7ca1da3aae.js +619 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.78d3ccb73e7ca1da3aae.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/style.js +4 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/style_index_js.5876024bb17dbd6a3ee6.js +712 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/style_index_js.5876024bb17dbd6a3ee6.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_apis_signOut_mjs-node_module-75790d.688c25857e7b81b1740f.js +533 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_apis_signOut_mjs-node_module-75790d.688c25857e7b81b1740f.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_tokenProvider_tokenProvider_-72f1c8.a917210f057fcfe224ad.js +6941 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_tokenProvider_tokenProvider_-72f1c8.a917210f057fcfe224ad.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_dist_esm_index_mjs.6bac1a8c4cc93f15f6b7.js +1021 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_dist_esm_index_mjs.6bac1a8c4cc93f15f6b7.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_ui-react_dist_esm_index_mjs.4fcecd65bef9e9847609.js +59698 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_ui-react_dist_esm_index_mjs.4fcecd65bef9e9847609.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_react-dom_client_js-node_modules_aws-amplify_ui-react_dist_styles_css.b43d4249e4d3dac9ad7b.js +7440 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_react-dom_client_js-node_modules_aws-amplify_ui-react_dist_styles_css.b43d4249e4d3dac9ad7b.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_semver_index_js.3f6754ac5116d47de76b.js +2792 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_semver_index_js.3f6754ac5116d47de76b.js.map +1 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_vscode-diff_dist_index_js.ea55f1f9346638aafbcf.js +4859 -0
- mito_ai-0.1.50.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_vscode-diff_dist_index_js.ea55f1f9346638aafbcf.js.map +1 -0
- mito_ai-0.1.50.dist-info/METADATA +221 -0
- mito_ai-0.1.50.dist-info/RECORD +205 -0
- mito_ai-0.1.50.dist-info/WHEEL +4 -0
- mito_ai-0.1.50.dist-info/entry_points.txt +2 -0
- mito_ai-0.1.50.dist-info/licenses/LICENSE +3 -0
mito_ai/db/handlers.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
# Copyright (c) Saga Inc.
|
|
2
|
+
# Distributed under the terms of the GNU Affero General Public License v3.0 License.
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import tornado
|
|
7
|
+
import uuid
|
|
8
|
+
from typing import Any, Final
|
|
9
|
+
from jupyter_server.base.handlers import APIHandler
|
|
10
|
+
from mito_ai.utils.schema import MITO_FOLDER
|
|
11
|
+
from mito_ai.utils.telemetry_utils import (
|
|
12
|
+
log_db_connection_attempt,
|
|
13
|
+
log_db_connection_success,
|
|
14
|
+
log_db_connection_error,
|
|
15
|
+
)
|
|
16
|
+
from mito_ai.db.utils import (
|
|
17
|
+
setup_database_dir,
|
|
18
|
+
save_connection,
|
|
19
|
+
delete_connection,
|
|
20
|
+
delete_schema,
|
|
21
|
+
crawl_and_store_schema,
|
|
22
|
+
install_db_drivers,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
DB_DIR_PATH: Final[str] = os.path.join(MITO_FOLDER, "db")
|
|
26
|
+
CONNECTIONS_PATH: Final[str] = os.path.join(DB_DIR_PATH, "connections.json")
|
|
27
|
+
SCHEMAS_PATH: Final[str] = os.path.join(DB_DIR_PATH, "schemas.json")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ConnectionsHandler(APIHandler):
    """REST endpoints for the connections.json file.

    GET    -> return every stored connection, keyed by UUID.
    POST   -> validate and store a new connection (drivers installed,
              schema crawled) before persisting it.
    DELETE -> remove a connection (and its crawled schema) by UUID.
    """

    @tornado.web.authenticated
    def get(self) -> None:
        """Get all connections."""
        # Lazily create the db directory and empty JSON stores on first access.
        setup_database_dir(DB_DIR_PATH, CONNECTIONS_PATH, SCHEMAS_PATH)

        with open(CONNECTIONS_PATH, "r") as f:
            connections = json.load(f)
        self.finish(json.dumps(connections))

    @tornado.web.authenticated
    def post(self) -> None:
        """Add a new connection.

        The connection is only persisted after the drivers install and the
        schema crawl both succeed, so connections.json never contains a
        connection we could not actually talk to.
        """
        try:
            # If the db dir doesn't exist, create it
            setup_database_dir(DB_DIR_PATH, CONNECTIONS_PATH, SCHEMAS_PATH)

            # Get the new connection data from the request body
            connection_details = json.loads(self.request.body)

            # Each connection is identified by a server-generated UUID.
            connection_id = str(uuid.uuid4())

            db_type = connection_details["type"]
            log_db_connection_attempt(db_type)

            # Install database drivers
            install_result = install_db_drivers(db_type)
            if not install_result["success"]:
                log_db_connection_error(db_type, install_result["error"])
                self.set_status(500)
                self.write({"error": install_result["error"]})
                return

            # First, try to validate the connection by building the schema
            crawl_result = crawl_and_store_schema(
                SCHEMAS_PATH,
                connection_id,
                connection_details,
            )

            if not crawl_result["success"]:
                log_db_connection_error(db_type, crawl_result["error_message"])
                self.set_status(500)
                self.write({"error": crawl_result["error_message"]})
                return

            # If schema building succeeded, save the connection
            save_connection(CONNECTIONS_PATH, connection_id, connection_details)

            log_db_connection_success(db_type, {})

            self.write(
                {
                    "status": "success",
                    "message": "Added new connection",
                    "connection_id": connection_id,
                }
            )

        except json.JSONDecodeError:
            self.set_status(400)
            self.write({"error": "Invalid JSON in request body"})
        except KeyError as e:
            # BUGFIX: a request body missing a required field (e.g. "type")
            # previously surfaced as a generic 500; it is a client error.
            self.set_status(400)
            self.write({"error": f"Missing required field: {e}"})
        except Exception as e:
            self.set_status(500)
            self.write({"error": str(e)})
        finally:
            self.finish()

    @tornado.web.authenticated
    def delete(self, *args: Any, **kwargs: Any) -> None:
        """Delete a connection (and its crawled schema) by UUID."""
        try:
            # Get the connection UUID from the URL
            connection_id = kwargs.get("uuid")
            if not connection_id:
                self.set_status(400)
                self.write({"error": "Connection UUID is required"})
                return

            # Delete the connection
            delete_connection(CONNECTIONS_PATH, connection_id)

            # Delete the schema
            delete_schema(SCHEMAS_PATH, connection_id)

            self.set_status(200)
            self.write(
                {
                    "status": "success",
                    "message": "Connection deleted successfully",
                }
            )

        except KeyError:
            # BUGFIX: delete_connection / delete_schema raise KeyError for an
            # unknown UUID; previously that produced a generic 500.
            self.set_status(404)
            self.write({"error": f"Connection not found: {connection_id}"})
        except Exception as e:
            self.set_status(500)
            self.write({"error": str(e)})
        finally:
            self.finish()
+
class SchemaHandler(APIHandler):
    """REST endpoints for the schemas.json file.

    GET    -> return every crawled schema, keyed by connection UUID.
    DELETE -> remove a crawled schema by UUID.
    """

    @tornado.web.authenticated
    def get(self) -> None:
        """Get all schemas."""
        # BUGFIX: create the db directory / empty JSON stores if missing,
        # mirroring ConnectionsHandler.get — previously a fresh install
        # crashed with FileNotFoundError here.
        setup_database_dir(DB_DIR_PATH, CONNECTIONS_PATH, SCHEMAS_PATH)

        with open(SCHEMAS_PATH, "r") as f:
            schemas = json.load(f)

        self.write(schemas)
        self.finish()

    @tornado.web.authenticated
    def delete(self, *args: Any, **kwargs: Any) -> None:
        """Delete a schema by UUID."""
        # Get the schema UUID from kwargs
        schema_id = kwargs.get("uuid")

        if not schema_id:
            self.set_status(400)
            self.write({"error": "Schema UUID is required"})
            self.finish()
            return

        try:
            delete_schema(SCHEMAS_PATH, schema_id)
        except KeyError:
            # BUGFIX: delete_schema raises KeyError for an unknown UUID;
            # previously that escaped as an unhandled 500.
            self.set_status(404)
            self.write({"error": f"Schema not found: {schema_id}"})
            self.finish()
            return

        self.set_status(200)
        self.write({"status": "success", "message": "Schema deleted successfully"})
        self.finish()
mito_ai/db/models.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
# Copyright (c) Saga Inc.
|
|
2
|
+
# Distributed under the terms of the GNU Affero General Public License v3.0 License.
|
|
3
|
+
|
|
4
|
+
from typing import TypedDict, List, Dict
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# BASE CRAWLER MODELS


class ColumnInfo(TypedDict):
    """A single column: its name and its database type (as a string)."""
    name: str
    type: str


class TableSchema(TypedDict):
    """Flat schema from the base crawler: table name -> list of columns."""
    tables: Dict[str, List[ColumnInfo]]


# SNOWFLAKE MODELS


class SchemaInfo(TypedDict):
    """Tables within one Snowflake schema: table name -> list of columns."""
    tables: Dict[str, List[ColumnInfo]]


class DatabaseInfo(TypedDict):
    """Schemas within one Snowflake database: schema name -> SchemaInfo."""
    schemas: Dict[str, SchemaInfo]


class WarehouseDetails(TypedDict):
    """Full Snowflake warehouse layout: database name -> DatabaseInfo."""
    databases: Dict[str, DatabaseInfo]
mito_ai/db/urls.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# Copyright (c) Saga Inc.
|
|
2
|
+
# Distributed under the terms of the GNU Affero General Public License v3.0 License.
|
|
3
|
+
|
|
4
|
+
from typing import List, Tuple, Any
|
|
5
|
+
from jupyter_server.utils import url_path_join
|
|
6
|
+
from mito_ai.db.handlers import ConnectionsHandler, SchemaHandler
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def get_db_urls(base_url: str) -> List[Tuple[str, Any, dict]]:
    """Get all database related URL patterns.

    Args:
        base_url: The base URL for the Jupyter server

    Returns:
        List of (url_pattern, handler_class, handler_kwargs) tuples
    """
    prefix = base_url + "/mito-ai/db"
    # Named group so Tornado passes the UUID to the handler as kwargs["uuid"].
    uuid_pattern = "(?P<uuid>[^/]+)"

    patterns: List[Tuple[str, Any, dict]] = []
    for resource, handler in (
        ("connections", ConnectionsHandler),
        ("schemas", SchemaHandler),
    ):
        # Collection endpoint (e.g. list / create).
        patterns.append((url_path_join(prefix, resource), handler, {}))
        # Item endpoint (e.g. delete by UUID).
        patterns.append((url_path_join(prefix, resource, uuid_pattern), handler, {}))
    return patterns
mito_ai/db/utils.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
# Copyright (c) Saga Inc.
|
|
2
|
+
# Distributed under the terms of the GNU Affero General Public License v3.0 License.
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
from mito_ai.db.crawlers import snowflake, base_crawler
|
|
7
|
+
from mito_ai.db.crawlers.constants import SUPPORTED_DATABASES
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def setup_database_dir(
    db_dir_path: str, connections_path: str, schemas_path: str
) -> None:
    """
    Create the database directory and its JSON stores if they are missing.

    Existing files are left untouched, so this is safe to call repeatedly.

    Args:
        db_dir_path (str): Directory that holds the JSON stores.
        connections_path (str): Path for connections.json.
        schemas_path (str): Path for schemas.json.
    """

    os.makedirs(db_dir_path, exist_ok=True)

    # Seed each store with an empty JSON object the first time around.
    for store_path in (connections_path, schemas_path):
        if not os.path.exists(store_path):
            with open(store_path, "w") as f:
                json.dump({}, f, indent=4)
|
|
31
|
+
def save_connection(
    connections_path: str, connection_id: str, connection_details: dict
) -> None:
    """
    Persist one connection into the connections.json file.

    Args:
        connections_path (str): The path to the connections.json file.
        connection_id (str): The UUID under which the connection is stored.
        connection_details (dict): The connection configuration to store.
    """

    # Read-modify-write the whole store; it is a small JSON object.
    with open(connections_path, "r") as f:
        stored = json.load(f)

    stored[connection_id] = connection_details

    with open(connections_path, "w") as f:
        json.dump(stored, f, indent=4)
|
+
|
|
54
|
+
def delete_connection(connections_path: str, connection_id: str) -> None:
    """
    Delete a connection by UUID.

    Args:
        connections_path (str): The path to the connections.json file.
        connection_id (str): The UUID of the connection to remove.

    Raises:
        KeyError: If no connection with that UUID exists.
    """

    with open(connections_path, "r") as f:
        stored = json.load(f)

    # pop without a default keeps the original KeyError on unknown ids.
    stored.pop(connection_id)

    with open(connections_path, "w") as f:
        json.dump(stored, f, indent=4)
+
|
|
71
|
+
def delete_schema(schemas_path: str, schema_id: str) -> None:
    """
    Delete a schema by UUID.

    Args:
        schemas_path (str): The path to the schemas.json file.
        schema_id (str): The UUID of the schema to delete.

    Raises:
        KeyError: If no schema with that UUID exists.
    """

    with open(schemas_path, "r") as f:
        stored = json.load(f)

    # pop without a default keeps the original KeyError on unknown ids.
    stored.pop(schema_id)

    with open(schemas_path, "w") as f:
        json.dump(stored, f, indent=4)
+
|
|
89
|
+
def crawl_and_store_schema(
    schemas_path: str,
    connection_id: str,
    connection_details: dict,
) -> dict:
    """
    Crawl a database's schema and store it in the schemas.json file.

    Args:
        schemas_path (str): The path to the schemas.json file.
        connection_id (str): The UUID of the connection being crawled.
        connection_details (dict): Connection configuration. Must contain a
            "type" key; the remaining keys depend on the type (e.g. username,
            password, host, port and database for postgres).

    Returns:
        dict: {"success": bool, "error_message": str, "schema": dict}
    """
    db_type = connection_details.get("type")

    if db_type == "snowflake":
        schema = snowflake.crawl_snowflake(
            connection_details["username"],
            connection_details["password"],
            connection_details["account"],
            connection_details["warehouse"],
        )
    elif db_type == "postgres":
        conn_str = f"postgresql+psycopg2://{connection_details['username']}:{connection_details['password']}@{connection_details['host']}:{connection_details['port']}/{connection_details['database']}"
        schema = base_crawler.crawl_db(conn_str, "postgres")
    elif db_type == "sqlite":
        conn_str = f"sqlite:///{connection_details['database']}"
        schema = base_crawler.crawl_db(conn_str, "sqlite")
    elif db_type == "mysql":
        conn_str = f"mysql+pymysql://{connection_details['username']}:{connection_details['password']}@{connection_details['host']}:{connection_details['port']}/{connection_details['database']}"
        schema = base_crawler.crawl_db(conn_str, "mysql")
    elif db_type == "mssql":
        odbc_driver_version = connection_details["odbc_driver_version"]
        conn_str = f"mssql+pyodbc://{connection_details['username']}:{connection_details['password']}@{connection_details['host']}:{connection_details['port']}/{connection_details['database']}?driver=ODBC+Driver+{odbc_driver_version}+for+SQL+Server"
        schema = base_crawler.crawl_db(conn_str, "mssql")
    elif db_type == "oracle":
        conn_str = f"oracle+oracledb://{connection_details['username']}:{connection_details['password']}@{connection_details['host']}:{connection_details['port']}?service_name={connection_details['service_name']}"
        schema = base_crawler.crawl_db(conn_str, "oracle")
    else:
        # BUGFIX: an unknown (or missing) type previously fell through to
        # `schema["error"]` with `schema` unbound, crashing with
        # UnboundLocalError. Report it as a normal failure instead.
        return {
            "success": False,
            "error_message": f"Unsupported database type: {db_type}",
            "schema": {},
        }

    if schema["error"]:
        return {
            "success": False,
            "error_message": schema["error"],
            "schema": {},
        }

    # If we successfully crawled the schema, write it to schemas.json
    with open(schemas_path, "r+") as f:
        # Load the existing schemas
        schemas = json.load(f)
        # Remove the error key from the schema and add the crawled schema
        schema.pop("error", None)
        schemas[connection_id] = schema["schema"]
        # Rewrite the file in place; truncate() handles the case where the
        # new JSON is shorter than the old content.
        f.seek(0)
        json.dump(schemas, f, indent=4)
        f.truncate()
    return {
        "success": True,
        "error_message": "",
        "schema": schema,
    }
|
|
157
|
+
def install_db_drivers(db_type: str) -> dict:
    """
    Install required database drivers for the given database type.

    Args:
        db_type (str): The type of database (e.g. 'snowflake', 'postgres')

    Returns:
        dict: A dictionary containing success status and error message if any
    """
    # Local import preserved from the original to avoid paying the cost at
    # module import time.
    from mito_ai.utils.utils import get_installed_packages, install_packages

    # BUGFIX: an unknown db_type previously raised KeyError on the
    # SUPPORTED_DATABASES lookup; report a clean failure instead.
    if db_type not in SUPPORTED_DATABASES:
        return {
            "success": False,
            "error": f"Unsupported database type: {db_type}",
        }

    installed_packages = get_installed_packages()
    required_packages = SUPPORTED_DATABASES[db_type].get("drivers", [])
    packages_to_install = [
        package for package in required_packages if package not in installed_packages
    ]

    if packages_to_install:
        install_result = install_packages(packages_to_install)
        if not install_result["success"]:
            return {
                "success": False,
                "error": f"Failed to install {db_type} drivers: {install_result['error']}",
            }

    return {"success": True, "error": None}
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Local SQL Server instance for the mito_ai database integration tests.
# NOTE: the obsolete top-level `version:` key was removed — Compose v2
# ignores it, and the other compose files in this package omit it.

services:
  db:
    image: mcr.microsoft.com/mssql/server:2022-latest
    container_name: mssql_test_db
    ports:
      - "1433:1433"
    environment:
      # Default user is SA (System Administrator)
      SA_PASSWORD: "YourStrong!Passw0rd" # Password must meet complexity requirements
      ACCEPT_EULA: "Y"
    volumes:
      - mssql_data:/var/opt/mssql
    restart: unless-stopped

  # Run setup.sql
  sqltools:
    image: mcr.microsoft.com/mssql-tools
    depends_on:
      - db
    volumes:
      - ./init:/init
    entrypoint: >
      /bin/bash -c "
      echo '⏳ Waiting for SQL Server to be ready...';
      for i in {1..30}; do
        /opt/mssql-tools/bin/sqlcmd -S db -U SA -P 'YourStrong!Passw0rd' -Q 'SELECT 1' && break
        sleep 2
      done;
      echo '🚀 Running init script...';
      /opt/mssql-tools/bin/sqlcmd -S db -U SA -P 'YourStrong!Passw0rd' -i /init/setup.sql
      "
    restart: "no"

volumes:
  mssql_data:
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
-- Copyright (c) Saga Inc.
-- Distributed under the terms of the GNU Affero General Public License v3.0 License.

-- Seed script for the SQL Server test container: creates test_db with a
-- small `users` table. GO terminates each sqlcmd batch.

CREATE DATABASE test_db;
GO

USE test_db;
GO

CREATE TABLE users (
    id INT IDENTITY(1,1) PRIMARY KEY,
    name NVARCHAR(255) NOT NULL,
    email NVARCHAR(255) UNIQUE NOT NULL
);
GO

INSERT INTO users (name, email) VALUES
('Alice', 'alice@example.com'),
('Bob', 'bob@example.com'),
('Charlie', 'charlie@example.com');
GO
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# Local MySQL instance for the mito_ai database integration tests.
# Start with `docker compose up`; the scripts mounted from ./init run on
# first boot (docker-entrypoint-initdb.d).
services:
  db:
    image: mysql:8
    container_name: mysql_test_db
    ports:
      - "3306:3306" # Maps container port 3306 to host port 3306
    environment:
      MYSQL_ROOT_PASSWORD: root_pass
      MYSQL_USER: test_user
      MYSQL_PASSWORD: test_pass
      MYSQL_DATABASE: test_db
    volumes:
      - mysql_data:/var/lib/mysql # persistent data volume
      - ./init:/docker-entrypoint-initdb.d # SQL files to run on init
    restart: unless-stopped

volumes:
  mysql_data:
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
-- Copyright (c) Saga Inc.
-- Distributed under the terms of the GNU Affero General Public License v3.0 License.

-- Seed script for the MySQL test container: a small `users` table with
-- three rows (runs against the MYSQL_DATABASE configured in compose.yml).

CREATE TABLE users (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    email VARCHAR(255) UNIQUE NOT NULL
);

INSERT INTO users (name, email) VALUES
('Alice', 'alice@example.com'),
('Bob', 'bob@example.com'),
('Charlie', 'charlie@example.com');
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# Local Oracle XE instance for the mito_ai database integration tests.
# Scripts mounted from ./init run when the database starts
# (container-entrypoint-startdb.d).
services:
  db:
    image: gvenzl/oracle-xe:21.3.0-slim
    container_name: oracle_test_db
    ports:
      - "1521:1521"
    environment:
      ORACLE_PASSWORD: test_pass
      APP_USER: test_user
      APP_USER_PASSWORD: test_pass
    volumes:
      - oracle_data:/opt/oracle/oradata
      - ./init:/container-entrypoint-startdb.d
    restart: unless-stopped

volumes:
  oracle_data:
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
-- Copyright (c) Saga Inc.
-- Distributed under the terms of the GNU Affero General Public License v3.0 License.

-- Seed script for the Oracle XE test container: department/employee tables
-- with a few rows, created as the app user inside the xepdb1 PDB.

connect test_user/test_pass@xepdb1;

create sequence hibernate_sequence start with 1 increment by 1;

create table department (id integer not null, name varchar(255), primary key (id));

create table employee (id integer not null, email varchar(255), first_name varchar(255), gender varchar(255), last_name varchar(255), salary numeric(19,2), department_id integer, primary key (id));

insert into department (id, name) values (1, 'IT');
insert into department (id, name) values (2, 'HR');
insert into department (id, name) values (3, 'Finance');

insert into employee (id, email, first_name, gender, last_name, salary, department_id) values (1, 'john.doe@example.com', 'John', 'M', 'Doe', 50000, 1);
insert into employee (id, email, first_name, gender, last_name, salary, department_id) values (2, 'jane.smith@example.com', 'Jane', 'F', 'Smith', 55000, 2);
insert into employee (id, email, first_name, gender, last_name, salary, department_id) values (3, 'jim.beam@example.com', 'Jim', 'M', 'Beam', 60000, 3);

COMMIT;
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# Local PostgreSQL instance for the mito_ai database integration tests.
# Start with `docker compose up`; the scripts mounted from ./init run on
# first boot (docker-entrypoint-initdb.d).
services:
  db:
    image: postgres:17
    container_name: pg_test_db
    ports:
      - "5432:5432" # Maps container port 5432 to host port 5432
    environment:
      POSTGRES_USER: test_user
      POSTGRES_PASSWORD: test_pass
      POSTGRES_DB: test_db
    volumes:
      - pg_data:/var/lib/postgresql/data # persistent data volume
      - ./init:/docker-entrypoint-initdb.d # SQL files to run on init
    restart: unless-stopped

volumes:
  pg_data:
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
-- Copyright (c) Saga Inc.
-- Distributed under the terms of the GNU Affero General Public License v3.0 License.

-- Seed script for the PostgreSQL test container: a small `users` table with
-- three rows (runs against the POSTGRES_DB configured in compose.yml).

CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    email TEXT UNIQUE NOT NULL
);

INSERT INTO users (name, email) VALUES
('Alice', 'alice@example.com'),
('Bob', 'bob@example.com'),
('Charlie', 'charlie@example.com');
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# coding: utf-8
|
|
3
|
+
|
|
4
|
+
# Copyright (c) Saga Inc.
|
|
5
|
+
# Distributed under the terms of the The Mito Enterprise license.
|
|
6
|
+
|
|
7
|
+
from mito_ai.utils.version_utils import is_enterprise, is_mitosheet_private
|
|
8
|
+
from mito_ai.constants import AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_VERSION, AZURE_OPENAI_MODEL
|
|
9
|
+
|
|
10
|
+
def is_azure_openai_configured() -> bool:
    """
    Azure OpenAI is only supported for Mito Enterprise users.

    Returns True only when the user is licensed (enterprise or
    mitosheet-private) and every Azure OpenAI setting is non-empty.
    """
    licensed = is_enterprise() or is_mitosheet_private()
    azure_settings = (
        AZURE_OPENAI_API_KEY,
        AZURE_OPENAI_ENDPOINT,
        AZURE_OPENAI_API_VERSION,
        AZURE_OPENAI_MODEL,
    )
    return licensed and all(azure_settings)
|