signalpilot-ai-internal 0.7.6__py3-none-any.whl → 0.10.22__py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- signalpilot_ai_internal/__init__.py +1 -0
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/databricks_schema_service.py +902 -0
- signalpilot_ai_internal/file_scanner_service.py +2 -1
- signalpilot_ai_internal/handlers.py +72 -2
- signalpilot_ai_internal/mcp_handlers.py +508 -0
- signalpilot_ai_internal/mcp_server_manager.py +298 -0
- signalpilot_ai_internal/mcp_service.py +1303 -0
- signalpilot_ai_internal/schema_search_config.yml +8 -8
- signalpilot_ai_internal/schema_search_service.py +62 -1
- signalpilot_ai_internal/test_dbt_mcp_server.py +180 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +5 -3
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +4 -2
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +7 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/110.224e83db03814fd03955.js +7 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.c4232851631fb2e7e59a.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/726.318e4e791edb63cc788f.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.d9914229e4f120e7e9e4.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.d80de1e4da5b520d2f3b.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b63c429ca81e743b403c.js +1 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +38 -20
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/METADATA +3 -2
- signalpilot_ai_internal-0.10.22.dist-info/RECORD +56 -0
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/WHEEL +1 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/490.b4ccb9601c8112407c5d.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.ed04fa601a43e8dd24d1.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/898.4e9edb7f224152c1dcb4.js +0 -2
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/898.4e9edb7f224152c1dcb4.js.LICENSE.txt +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.ee8951353b00c13b8070.js +0 -1
- signalpilot_ai_internal-0.7.6.dist-info/RECORD +0 -49
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/licenses/LICENSE +0 -0
signalpilot_ai_internal/schema_search_config.yml

```diff
@@ -1,22 +1,22 @@
 logging:
-  level:
+  level: 'WARNING'
 
 embedding:
-  location:
-  model:
-  metric:
+  location: 'memory'
+  model: 'multi-qa-MiniLM-L6-cos-v1'
+  metric: 'cosine'
   batch_size: 32
   show_progress: false
-  cache_dir:
+  cache_dir: '/tmp/.schema_search_cache'
 
 chunking:
-  strategy:
+  strategy: 'raw'
   max_tokens: 256
   overlap_tokens: 50
-  model:
+  model: 'gpt-4o-mini'
 
 search:
-  strategy:
+  strategy: 'bm25'
   initial_top_k: 20
   rerank_top_k: 5
   semantic_weight: 0.67
```
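For reference, a minimal sketch of reading these 0.10.22 defaults back in Python. It assumes PyYAML is installed and a local copy of the file; the path is an assumption, and this is not how the package itself loads the config:

```python
# Illustrative only: parse the defaults shown in the diff above with PyYAML.
from pathlib import Path

import yaml

config = yaml.safe_load(Path("schema_search_config.yml").read_text())  # assumed local copy

assert config["embedding"]["model"] == "multi-qa-MiniLM-L6-cos-v1"
assert config["embedding"]["metric"] == "cosine"
assert config["search"]["strategy"] == "bm25"
print(config["chunking"]["max_tokens"], config["search"]["rerank_top_k"])  # 256 5
```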
signalpilot_ai_internal/schema_search_service.py

```diff
@@ -21,10 +21,62 @@ class SchemaSearchHandler(APIHandler):
         for key, value in os.environ.items():
             if key.endswith("_CONNECTION_JSON") and isinstance(value, str) and value.strip().startswith("{"):
                 config = json.loads(value)
+
+                # Special handling for Databricks
+                if config.get("type") == "databricks":
+                    return self._build_databricks_url(config)
+
                 url = config.get("connectionUrl")
                 if url:
                     return url
         return os.environ.get("DB_URL")
+
+    def _build_databricks_url(self, config: dict) -> Optional[str]:
+        """Build Databricks URL in the format: databricks://token:{token}@{host}:443/{catalog}?http_path={http_path}"""
+        import re
+
+        # Extract host from connectionUrl
+        connection_url = config.get('connectionUrl', '')
+        if not connection_url:
+            return None
+
+        url_match = re.match(r'https?://([^/]+)', connection_url)
+        if not url_match:
+            return None
+
+        host = url_match.group(1)
+
+        # Get access token based on auth type
+        auth_type = config.get('authType', 'pat')
+        if auth_type == 'pat':
+            token = config.get('accessToken', '')
+        else:
+            # For service principal, we would need to get OAuth token
+            # For now, return None to fallback to other methods
+            return None
+
+        if not token:
+            return None
+
+        # Get HTTP path
+        http_path = config.get('warehouseHttpPath') or config.get('httpPath', '')
+        if not http_path:
+            warehouse_id = config.get('warehouseId')
+            if warehouse_id:
+                http_path = f"/sql/1.0/warehouses/{warehouse_id}"
+            else:
+                return None
+
+        # Get catalog (optional)
+        catalog = config.get('catalog', '')
+
+        # Build the URL
+        if catalog:
+            db_url = f"databricks://token:{token}@{host}:443/{catalog}?http_path={http_path}"
+        else:
+            db_url = f"databricks://token:{token}@{host}:443?http_path={http_path}"
+
+        return db_url
 
     @tornado.web.authenticated
     async def post(self):
@@ -60,11 +112,13 @@ class SchemaSearchHandler(APIHandler):
 
         if db_url_lower.startswith("snowflake://"):
             self._ensure_snowflake_dependencies()
+        elif db_url_lower.startswith("databricks://"):
+            self._ensure_databricks_dependencies()
         elif db_url_lower.startswith("postgresql") or db_url_lower.startswith("postgres") or db_url_lower.startswith("mysql+pymysql"):
             pass
         else:
             self.set_status(400)
-            self.finish(json.dumps({"error": "Schema search currently supports PostgreSQL, MySQL, or Snowflake connections"}))
+            self.finish(json.dumps({"error": "Schema search currently supports PostgreSQL, MySQL, Snowflake, or Databricks connections"}))
             return
 
         engine = None
@@ -107,3 +161,10 @@ class SchemaSearchHandler(APIHandler):
         except ImportError:
             self._install_package("snowflake-sqlalchemy")
             import snowflake.sqlalchemy  # type: ignore  # noqa: F401
+
+    def _ensure_databricks_dependencies(self) -> None:
+        try:
+            import databricks.sqlalchemy  # type: ignore  # noqa: F401
+        except ImportError:
+            self._install_package("databricks-sqlalchemy")
+            import databricks.sqlalchemy  # type: ignore  # noqa: F401
```
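To illustrate the new Databricks path, here is a hedged, standalone restatement of the URL-building logic with an invented `*_CONNECTION_JSON` payload. The key names mirror the handler above, but the environment variable name, host, token, warehouse ID, and catalog are placeholders, not values from the package:

```python
import json
import os
import re
from typing import Optional

# Hypothetical connection payload; the env var name just has to end in "_CONNECTION_JSON".
os.environ["DATABRICKS_CONNECTION_JSON"] = json.dumps({
    "type": "databricks",
    "connectionUrl": "https://adb-1234567890123456.7.azuredatabricks.net",
    "authType": "pat",
    "accessToken": "dapiEXAMPLETOKEN",
    "warehouseId": "abc123def456",
    "catalog": "main",
})


def build_databricks_url(config: dict) -> Optional[str]:
    """Sketch that mirrors the handler's _build_databricks_url logic."""
    host_match = re.match(r"https?://([^/]+)", config.get("connectionUrl", ""))
    if not host_match or config.get("authType", "pat") != "pat":
        return None
    token = config.get("accessToken", "")
    http_path = config.get("warehouseHttpPath") or config.get("httpPath", "")
    if not http_path and config.get("warehouseId"):
        http_path = f"/sql/1.0/warehouses/{config['warehouseId']}"
    if not token or not http_path:
        return None
    catalog = config.get("catalog", "")
    base = f"databricks://token:{token}@{host_match.group(1)}:443"
    return f"{base}/{catalog}?http_path={http_path}" if catalog else f"{base}?http_path={http_path}"


config = json.loads(os.environ["DATABRICKS_CONNECTION_JSON"])
print(build_databricks_url(config))
# databricks://token:dapiEXAMPLETOKEN@adb-1234567890123456.7.azuredatabricks.net:443/main?http_path=/sql/1.0/warehouses/abc123def456
```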
signalpilot_ai_internal/test_dbt_mcp_server.py (new file)

```diff
@@ -0,0 +1,180 @@
+"""
+Test script to run the dbt-mcp server with enhanced visibility.
+
+This script provides detailed logging and output to help debug
+and understand what's happening when the MCP server runs.
+"""
+
+import asyncio
+import logging
+import os
+import sys
+from pathlib import Path
+
+# Add the dbt-mcp src directory to Python path
+dbt_mcp_src = Path(__file__).parent / "dbt-mcp" / "src"
+sys.path.insert(0, str(dbt_mcp_src))
+
+from dbt_mcp.config.config import load_config
+from dbt_mcp.config.transport import validate_transport
+from dbt_mcp.mcp.server import create_dbt_mcp
+
+
+def setup_logging():
+    """Configure detailed logging for debugging."""
+    logging.basicConfig(
+        level=logging.DEBUG,
+        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+        handlers=[
+            logging.StreamHandler(sys.stdout),
+            logging.FileHandler('dbt_mcp_test.log', mode='w')
+        ]
+    )
+
+    # Set specific loggers to DEBUG
+    logging.getLogger('dbt_mcp').setLevel(logging.DEBUG)
+    logging.getLogger('mcp').setLevel(logging.DEBUG)
+
+    return logging.getLogger(__name__)
+
+
+async def test_server_creation():
+    """Test creating the dbt-mcp server."""
+    logger = setup_logging()
+
+    logger.info("=" * 80)
+    logger.info("Starting dbt-mcp Server Test")
+    logger.info("=" * 80)
+
+    try:
+        # Load configuration
+        logger.info("\n--- Loading Configuration ---")
+        logger.info(f"Current working directory: {os.getcwd()}")
+        logger.info(f"Environment variables (DBT related):")
+        for key, value in os.environ.items():
+            if 'DBT' in key.upper() or 'MCP' in key.upper():
+                logger.info(f"  {key}: {value}")
+
+        config = load_config()
+        logger.info(f"Config loaded successfully")
+        logger.info(f"  Enabled toolsets: {config.enabled_toolsets}")
+        logger.info(f"  Disabled toolsets: {config.disabled_toolsets}")
+        logger.info(f"  Enabled tools: {config.enable_tools}")
+        logger.info(f"  Disabled tools: {config.disable_tools}")
+
+        # Create server
+        logger.info("\n--- Creating MCP Server ---")
+        server = await create_dbt_mcp(config)
+        logger.info(f"Server created: {server}")
+        logger.info(f"Server name: {server.name}")
+
+        # List registered tools
+        logger.info("\n--- Registered Tools ---")
+        if hasattr(server, '_tool_manager') and hasattr(server._tool_manager, 'tools'):
+            tools = server._tool_manager.tools
+            logger.info(f"Total tools registered: {len(tools)}")
+            for tool_name in sorted(tools.keys()):
+                logger.info(f"  - {tool_name}")
+        else:
+            logger.warning("Could not access tool manager to list tools")
+
+        # List registered prompts (if any)
+        logger.info("\n--- Registered Prompts ---")
+        if hasattr(server, '_prompt_manager') and hasattr(server._prompt_manager, 'prompts'):
+            prompts = server._prompt_manager.prompts
+            logger.info(f"Total prompts registered: {len(prompts)}")
+            for prompt_name in sorted(prompts.keys()):
+                logger.info(f"  - {prompt_name}")
+        else:
+            logger.info("No prompts registered")
+
+        # Get transport type
+        logger.info("\n--- Transport Configuration ---")
+        transport_type = os.environ.get("MCP_TRANSPORT", "stdio")
+        transport = validate_transport(transport_type)
+        logger.info(f"Transport type: {transport}")
+
+        logger.info("\n" + "=" * 80)
+        logger.info("Server initialization complete!")
+        logger.info("=" * 80)
+        logger.info("\nTo run the server, uncomment the server.run() line below")
+        logger.info("Note: server.run() will block and wait for MCP client connections")
+
+        # Uncomment the line below to actually run the server
+        # logger.info("\n--- Running Server (blocking) ---")
+        # server.run(transport=transport)
+
+        return server
+
+    except Exception as e:
+        logger.error(f"\n!!! Error occurred: {e}", exc_info=True)
+        raise
+
+
+async def test_tool_call():
+    """Test calling a specific tool if available."""
+    logger = logging.getLogger(__name__)
+
+    try:
+        logger.info("\n" + "=" * 80)
+        logger.info("Testing Tool Call")
+        logger.info("=" * 80)
+
+        config = load_config()
+        server = await create_dbt_mcp(config)
+
+        # Try to call a simple tool (e.g., list or search)
+        # This is just an example - adjust based on available tools
+        if hasattr(server, '_tool_manager') and hasattr(server._tool_manager, 'tools'):
+            tools = server._tool_manager.tools
+
+            # Look for a simple tool to test
+            test_tools = ['search', 'list', 'get_all_models']
+            for tool_name in test_tools:
+                if tool_name in tools:
+                    logger.info(f"\nTesting tool: {tool_name}")
+                    try:
+                        # Call with minimal arguments
+                        result = await server.call_tool(tool_name, {})
+                        logger.info(f"Tool call successful!")
+                        logger.info(f"Result type: {type(result)}")
+                        logger.info(f"Result: {result}")
+                        break
+                    except Exception as tool_error:
+                        logger.warning(f"Tool call failed (expected if missing config): {tool_error}")
+            else:
+                logger.info("No testable tools found in the expected list")
+
+    except Exception as e:
+        logger.error(f"Error in tool test: {e}", exc_info=True)
+
+
+def main():
+    """Main entry point."""
+    logger = setup_logging()
+
+    try:
+        # Test 1: Create server and inspect
+        logger.info("\n### TEST 1: Server Creation and Inspection ###\n")
+        server = asyncio.run(test_server_creation())
+
+        # Test 2: Try calling a tool (optional)
+        logger.info("\n\n### TEST 2: Tool Call Test (Optional) ###\n")
+        try:
+            asyncio.run(test_tool_call())
+        except Exception as e:
+            logger.info(f"Tool test skipped or failed: {e}")
+
+        logger.info("\n\n" + "=" * 80)
+        logger.info("All tests complete! Check dbt_mcp_test.log for full output.")
+        logger.info("=" * 80)
+
+    except KeyboardInterrupt:
+        logger.info("\n\nInterrupted by user")
+    except Exception as e:
+        logger.error(f"\n\nFatal error: {e}", exc_info=True)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
```
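A possible way to drive that script from another Python process, assuming the dbt-mcp/src checkout it expects sits next to it; the module and helper names come from the script above, everything else is illustrative:

```python
# Hedged usage sketch for test_dbt_mcp_server.py; requires the dbt-mcp/src
# checkout referenced at the top of that file, plus whatever config load_config needs.
import asyncio
import os

import test_dbt_mcp_server as smoke_test

os.environ.setdefault("MCP_TRANSPORT", "stdio")  # transport value the script reads

# Either run both tests via its entry point...
# smoke_test.main()

# ...or just build the server and print the registered tool names.
async def list_tools():
    server = await smoke_test.test_server_creation()
    manager = getattr(server, "_tool_manager", None)
    return sorted(manager.tools.keys()) if manager and hasattr(manager, "tools") else []

print(asyncio.run(list_tools()))
```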
share/jupyter/labextensions/signalpilot-ai-internal/package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "signalpilot-ai-internal",
-  "version": "0.7.6",
+  "version": "0.10.22",
   "description": "SignalPilot Agent - Your Jupyter Notebook Assistant",
   "keywords": [
     "jupyter",
@@ -24,13 +24,14 @@
   "style": "style/index.css",
   "scripts": {
     "build": "jlpm prebuild && jlpm build:lib && jlpm build:labextension:dev",
+    "build:no_tracking": "cross-env DISABLE_TRACKING=true jlpm prebuild && cross-env DISABLE_TRACKING=true jlpm build:lib && cross-env DISABLE_TRACKING=true jlpm build:labextension:dev",
     "build:prod": "jlpm prebuild && jlpm clean && jlpm build:lib:prod && jlpm build:labextension",
     "build:labextension": "jlpm prebuild && jupyter labextension build .",
     "build:labextension:dev": "jlpm prebuild && jupyter labextension build --development True .",
     "build:lib": "jlpm prebuild && tsc --sourceMap",
     "build:lib:prod": "jlpm prebuild && tsc && jlpm build:remove-console",
     "build:remove-console": "cross-env NODE_ENV=production babel lib --out-dir lib --extensions .js",
-    "prebuild": "mkdir -p lib/Config/prompts && cp src/Config/prompts/*.md lib/Config/prompts",
+    "prebuild": "mkdir -p lib/Config/prompts && cp src/Config/prompts/*.md lib/Config/prompts && node scripts/generate-tracking-config.js",
     "clean": "jlpm clean:lib",
     "clean:lib": "rimraf lib tsconfig.tsbuildinfo",
     "clean:lintcache": "rimraf .eslintcache .stylelintcache",
@@ -79,6 +80,7 @@
     "dompurify": "^3.2.5",
     "driver.js": "^1.3.6",
     "fuse.js": "^7.1.0",
+    "highlight.js": "^11.11.1",
     "jwt-decode": "^4.0.0",
     "marked": "^15.0.11",
     "partial-json": "^0.1.7",
@@ -134,7 +136,7 @@
     "outputDir": "signalpilot_ai_internal/labextension",
     "schemaDir": "schema",
     "_build": {
-      "load": "static/remoteEntry.ee8951353b00c13b8070.js",
+      "load": "static/remoteEntry.b63c429ca81e743b403c.js",
       "extension": "./extension",
       "style": "./style"
     }
```
share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig

```diff
@@ -1,6 +1,6 @@
 {
   "name": "signalpilot-ai-internal",
-  "version": "0.7.6",
+  "version": "0.10.22",
   "description": "SignalPilot Agent - Your Jupyter Notebook Assistant",
   "keywords": [
     "jupyter",
@@ -24,13 +24,14 @@
   "style": "style/index.css",
   "scripts": {
     "build": "jlpm prebuild && jlpm build:lib && jlpm build:labextension:dev",
+    "build:no_tracking": "cross-env DISABLE_TRACKING=true jlpm prebuild && cross-env DISABLE_TRACKING=true jlpm build:lib && cross-env DISABLE_TRACKING=true jlpm build:labextension:dev",
     "build:prod": "jlpm prebuild && jlpm clean && jlpm build:lib:prod && jlpm build:labextension",
     "build:labextension": "jlpm prebuild && jupyter labextension build .",
     "build:labextension:dev": "jlpm prebuild && jupyter labextension build --development True .",
     "build:lib": "jlpm prebuild && tsc --sourceMap",
     "build:lib:prod": "jlpm prebuild && tsc && jlpm build:remove-console",
     "build:remove-console": "cross-env NODE_ENV=production babel lib --out-dir lib --extensions .js",
-    "prebuild": "mkdir -p lib/Config/prompts && cp src/Config/prompts/*.md lib/Config/prompts",
+    "prebuild": "mkdir -p lib/Config/prompts && cp src/Config/prompts/*.md lib/Config/prompts && node scripts/generate-tracking-config.js",
     "clean": "jlpm clean:lib",
     "clean:lib": "rimraf lib tsconfig.tsbuildinfo",
     "clean:lintcache": "rimraf .eslintcache .stylelintcache",
@@ -79,6 +80,7 @@
     "dompurify": "^3.2.5",
     "driver.js": "^1.3.6",
     "fuse.js": "^7.1.0",
+    "highlight.js": "^11.11.1",
     "jwt-decode": "^4.0.0",
     "marked": "^15.0.11",
     "partial-json": "^0.1.7",
```
share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json

```diff
@@ -1,5 +1,11 @@
 {
-  "jupyter.lab.shortcuts": [],
+  "jupyter.lab.shortcuts": [
+    {
+      "command": "signalpilot-ai-internal:inline-edit",
+      "keys": ["Accel K"],
+      "selector": ".jp-Notebook .jp-Cell.jp-mod-active"
+    }
+  ],
   "jupyter.lab.setting-icon": "ui-components:settings",
   "jupyter.lab.setting-icon-label": "Sage AI Settings",
   "jupyter.lab.toolbars": {
```