altimate-datapilot-cli 0.0.18__py3-none-any.whl → 0.0.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/METADATA +4 -1
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/RECORD +15 -10
- datapilot/__init__.py +1 -1
- datapilot/cli/main.py +86 -1
- datapilot/core/knowledge/__init__.py +0 -0
- datapilot/core/knowledge/cli.py +46 -0
- datapilot/core/knowledge/server.py +84 -0
- datapilot/core/mcp_utils/__init__.py +2 -0
- datapilot/core/mcp_utils/mcp.py +176 -0
- datapilot/core/platforms/dbt/cli/cli.py +24 -13
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/AUTHORS.rst +0 -0
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/LICENSE +0 -0
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/WHEEL +0 -0
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/entry_points.txt +0 -0
- {altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/top_level.txt +0 -0
{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: altimate-datapilot-cli
-Version: 0.0.18
+Version: 0.0.20
 Summary: Assistant for Data Teams
 Home-page: https://github.com/AltimateAI/datapilot-cli
 Author: Altimate Inc
@@ -36,6 +36,9 @@ Requires-Dist: ruamel.yaml~=0.18.6
 Requires-Dist: tabulate~=0.9.0
 Requires-Dist: requests>=2.31
 Requires-Dist: sqlglot~=25.30.0
+Requires-Dist: mcp~=1.9.0
+Requires-Dist: pyperclip~=1.8.2
+Requires-Dist: python-dotenv~=1.0.0
 
 ========
 Overview
{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/RECORD
RENAMED
@@ -1,7 +1,7 @@
-datapilot/__init__.py,sha256=
+datapilot/__init__.py,sha256=wQP0zPwrPeGkZ12uVa4mTM7oYoqji6PECSRd7QD_QXE,23
 datapilot/__main__.py,sha256=I9USmeNnK-cAHb6LZfydJC0LeNSE8enieeY55wpR6uw,380
 datapilot/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datapilot/cli/main.py,sha256=
+datapilot/cli/main.py,sha256=rRSysEUFFPXTIm9JxSL-JqLD73i1QvJMtgscnAGV6Is,3170
 datapilot/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/clients/altimate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/clients/altimate/client.py,sha256=00TRe_ck8UgbhFMAnrLBmug3fAWxAggNl2do5Um_4oU,4083
@@ -21,6 +21,11 @@ datapilot/core/insights/sql/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
 datapilot/core/insights/sql/base/insight.py,sha256=k8UUn0qrN-QG6NCunPl7Hd6L6kd1X1eUAeGEsyl8v0o,250
 datapilot/core/insights/sql/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/core/insights/sql/static/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datapilot/core/knowledge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datapilot/core/knowledge/cli.py,sha256=-bxf6kNVnIqaFtCTS0k7XZ07O21P5YGg9Nh3Bj5mOkQ,1415
+datapilot/core/knowledge/server.py,sha256=hO6WyMUKBkukx_7JSBDvLztoFIKqEezNl4flQdtmNwQ,3191
+datapilot/core/mcp_utils/__init__.py,sha256=39zN2cGQCsEjRFeExv2bX4MoqVv4H14o_SYp_QG2jHU,18
+datapilot/core/mcp_utils/mcp.py,sha256=e-FbHmpEr673hxVBFHv996qnHCuqKqiCrMMy_g5tMjg,6027
 datapilot/core/platforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/core/platforms/dbt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/core/platforms/dbt/constants.py,sha256=kW4PEsYWosfDjBEZ9JUhWFtMxKMSNoIn0DNPlJEJuYc,515
@@ -30,7 +35,7 @@ datapilot/core/platforms/dbt/factory.py,sha256=YIQtb-FQQAJsifJ3KiLjjk0WIKTHtEPTN
 datapilot/core/platforms/dbt/formatting.py,sha256=bpfa7XmVghTq4WnGDGYC6DruwOwH8YmjFHghoo5cPD8,1638
 datapilot/core/platforms/dbt/utils.py,sha256=ozFHprR6LTLXQdrGyaRoyIBTua4P1NkP8T7LGgN-9c0,18577
 datapilot/core/platforms/dbt/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datapilot/core/platforms/dbt/cli/cli.py,sha256=
+datapilot/core/platforms/dbt/cli/cli.py,sha256=kS19eIGf6iBRZtSw0stFW108jW1wM9lSU1BzBabhg3k,7263
 datapilot/core/platforms/dbt/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/core/platforms/dbt/hooks/executor_hook.py,sha256=gSM50vAO7C-f1rdnHogWbqc87aCXPXysZepjp5L2qzw,2966
 datapilot/core/platforms/dbt/insights/__init__.py,sha256=hk7BAzCTDkY8WNV6L0v-CPn9mrsDyJJusoQxNxGyzAY,7634
@@ -139,10 +144,10 @@ datapilot/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/utils/utils.py,sha256=MY8q6ZBJ0hkrTuH7gWMxAlEAQGrajXFMabEhtGtT7sc,11524
 datapilot/utils/formatting/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datapilot/utils/formatting/utils.py,sha256=rAVmIYuldvw9VvCSwG2kMTEgiT7cEconp_F1sAWVyCo,1377
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
-altimate_datapilot_cli-0.0.
+altimate_datapilot_cli-0.0.20.dist-info/AUTHORS.rst,sha256=S4H4zw_v3GVyz5_55jF5Gf_YNG3s5Y0VgbQaEov9PFk,50
+altimate_datapilot_cli-0.0.20.dist-info/LICENSE,sha256=Mf7VqpsmU2QR5_s2Cb_ZeeMB2Q9KW7YXJENZPFZRK1k,1100
+altimate_datapilot_cli-0.0.20.dist-info/METADATA,sha256=N3AphCCJ77dpuI1JkMbqQxth2lKRaxnpwjKB1THvIp0,2474
+altimate_datapilot_cli-0.0.20.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+altimate_datapilot_cli-0.0.20.dist-info/entry_points.txt,sha256=0zwgKxN40RLVB5jSmlJz7IH_FBqRtpFdbrdZn-xuQIY,141
+altimate_datapilot_cli-0.0.20.dist-info/top_level.txt,sha256=gAOFOdwB00vcxv74y4M1J-nQtPvEatU8-mYViEBcToo,10
+altimate_datapilot_cli-0.0.20.dist-info/RECORD,,
datapilot/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.0.18"
+__version__ = "0.0.20"
datapilot/cli/main.py
CHANGED
@@ -1,11 +1,96 @@
+import json
+import os
+import re
+from pathlib import Path
+
 import click
+from dotenv import load_dotenv
 
+from datapilot import __version__
+from datapilot.core.knowledge.cli import cli as knowledge
+from datapilot.core.mcp_utils.mcp import mcp
 from datapilot.core.platforms.dbt.cli.cli import dbt
 
 
+def load_config_from_file():
+    """Load configuration from ~/.altimate/altimate.json if it exists."""
+    config_path = Path.home() / ".altimate" / "altimate.json"
+
+    if not config_path.exists():
+        return {}
+
+    try:
+        with config_path.open() as f:
+            config = json.load(f)
+        return config
+    except (OSError, json.JSONDecodeError) as e:
+        click.echo(f"Warning: Failed to load config from {config_path}: {e}", err=True)
+        return {}
+
+
+def substitute_env_vars(value):
+    """Replace ${env:ENV_VARIABLE} patterns with actual environment variable values."""
+    if not isinstance(value, str):
+        return value
+
+    # Pattern to match ${env:VARIABLE_NAME}
+    pattern = r"\$\{env:([^}]+)\}"
+
+    def replacer(match):
+        env_var = match.group(1)
+        return os.environ.get(env_var, match.group(0))
+
+    return re.sub(pattern, replacer, value)
+
+
+def process_config(config):
+    """Process configuration dictionary to substitute environment variables."""
+    processed = {}
+    for key, value in config.items():
+        processed[key] = substitute_env_vars(value)
+    return processed
+
+
 @click.group()
-def datapilot():
+@click.version_option(version=__version__, prog_name="datapilot")
+@click.option("--token", required=False, help="Your API token for authentication.", hide_input=True)
+@click.option("--instance-name", required=False, help="Your tenant ID.")
+@click.option("--backend-url", required=False, help="Altimate's Backend URL", default="https://api.myaltimate.com")
+@click.pass_context
+def datapilot(ctx, token, instance_name, backend_url):
     """Altimate CLI for DBT project management."""
+    # Load .env file from current directory if it exists
+    load_dotenv()
+
+    # Load configuration from file
+    file_config = load_config_from_file()
+    file_config = process_config(file_config)
+
+    # Map config file keys to CLI option names
+    config_mapping = {"altimateApiKey": "token", "altimateInstanceName": "instance_name", "altimateUrl": "backend_url"}
+
+    # Store common options in context, with CLI args taking precedence
+    ctx.ensure_object(dict)
+
+    # Apply file config first
+    for file_key, cli_key in config_mapping.items():
+        if file_key in file_config:
+            ctx.obj[cli_key] = file_config[file_key]
+
+    # Override with CLI arguments if provided
+    if token is not None:
+        ctx.obj["token"] = token
+    if instance_name is not None:
+        ctx.obj["instance_name"] = instance_name
+    if backend_url != "https://api.myaltimate.com":  # Only override if not default
+        ctx.obj["backend_url"] = backend_url
+
+    # Set defaults if nothing was provided
+    ctx.obj.setdefault("token", None)
+    ctx.obj.setdefault("instance_name", None)
+    ctx.obj.setdefault("backend_url", "https://api.myaltimate.com")
 
 
 datapilot.add_command(dbt)
+datapilot.add_command(mcp)
+datapilot.add_command(knowledge)
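
The new entry point resolves credentials from three places: ~/.altimate/altimate.json (keys altimateApiKey, altimateInstanceName, altimateUrl), ${env:NAME} references inside that file, and finally the --token/--instance-name/--backend-url flags, which take precedence. A minimal sketch of the substitution step, mirroring the regex used above in datapilot/cli/main.py (the ALTIMATE_API_KEY variable and the sample values are illustrative only):

# Resolve ${env:NAME} references the same way substitute_env_vars does.
import json
import os
import re

os.environ.setdefault("ALTIMATE_API_KEY", "dummy-token")  # illustrative value

raw = json.loads('{"altimateApiKey": "${env:ALTIMATE_API_KEY}", "altimateInstanceName": "acme"}')
pattern = r"\$\{env:([^}]+)\}"  # same pattern as the CLI uses

resolved = {
    key: re.sub(pattern, lambda m: os.environ.get(m.group(1), m.group(0)), value)
    if isinstance(value, str)
    else value
    for key, value in raw.items()
}
print(resolved)  # {'altimateApiKey': 'dummy-token', 'altimateInstanceName': 'acme'}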
datapilot/core/knowledge/__init__.py
ADDED
File without changes

datapilot/core/knowledge/cli.py
ADDED
@@ -0,0 +1,46 @@
+from http.server import HTTPServer
+
+import click
+
+from .server import KnowledgeBaseHandler
+
+
+@click.group(name="knowledge")
+def cli():
+    """knowledge specific commands."""
+
+
+@cli.command()
+@click.option("--port", default=4000, help="Port to run the server on")
+@click.pass_context
+def serve(ctx, port):
+    """Serve knowledge bases via HTTP server."""
+    # Get configuration from parent context
+    token = ctx.parent.obj.get("token")
+    instance_name = ctx.parent.obj.get("instance_name")
+    backend_url = ctx.parent.obj.get("backend_url")
+
+    if not token or not instance_name:
+        click.echo(
+            "Error: API token and instance name are required. Use --token and --instance-name options or set them in config.", err=True
+        )
+        ctx.exit(1)
+
+    # Set context data for the handler
+    KnowledgeBaseHandler.token = token
+    KnowledgeBaseHandler.instance_name = instance_name
+    KnowledgeBaseHandler.backend_url = backend_url
+
+    server_address = ("", port)
+    httpd = HTTPServer(server_address, KnowledgeBaseHandler)
+
+    click.echo(f"Starting knowledge base server on port {port}...")
+    click.echo(f"Backend URL: {backend_url}")
+    click.echo(f"Instance: {instance_name}")
+    click.echo(f"Server running at http://localhost:{port}")
+
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        click.echo("\nShutting down server...")
+        httpd.shutdown()
datapilot/core/knowledge/server.py
ADDED
@@ -0,0 +1,84 @@
+import json
+import re
+from http.server import BaseHTTPRequestHandler
+from urllib.error import HTTPError
+from urllib.error import URLError
+from urllib.parse import urlparse
+from urllib.request import Request
+from urllib.request import urlopen
+
+import click
+
+
+class KnowledgeBaseHandler(BaseHTTPRequestHandler):
+    """HTTP request handler for serving knowledge bases and health checks."""
+
+    token: str = ""
+    instance_name: str = ""
+    backend_url: str = ""
+
+    def do_GET(self):
+        """Handle GET requests."""
+        path = urlparse(self.path).path
+
+        # Match /knowledge_bases/{uuid} pattern
+        match = re.match(r"^/kb/([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})$", path)
+
+        if match:
+            public_id = match.group(1)
+            self.handle_knowledge_base(public_id)
+        elif path == "/health":
+            self.handle_health()
+        else:
+            self.send_error(404, "Not Found")
+
+    def handle_knowledge_base(self, public_id):
+        """Fetch and return knowledge base data."""
+        url = f"{self.backend_url}/knowledge_bases/private/{public_id}"
+
+        # Validate URL scheme for security
+        parsed_url = urlparse(url)
+        if parsed_url.scheme not in ("http", "https"):
+            self.send_response(400)
+            self.send_header("Content-Type", "application/json")
+            self.end_headers()
+            error_msg = json.dumps({"error": "Invalid URL scheme. Only HTTP and HTTPS are allowed."})
+            self.wfile.write(error_msg.encode("utf-8"))
+            return
+
+        headers = {"Authorization": f"Bearer {self.token}", "X-Tenant": self.instance_name, "Content-Type": "application/json"}
+
+        req = Request(url, headers=headers)  # noqa: S310
+
+        try:
+            # URL scheme validated above - only HTTP/HTTPS allowed
+            with urlopen(req, timeout=30) as response:  # noqa: S310
+                data = response.read()
+                self.send_response(200)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(data)
+        except HTTPError as e:
+            error_body = e.read()
+            error_data = error_body.decode("utf-8") if error_body else '{"error": "HTTP Error"}'
+            self.send_response(e.code)
+            self.send_header("Content-Type", "application/json")
+            self.end_headers()
+            self.wfile.write(error_data.encode("utf-8"))
+        except URLError as e:
+            self.send_response(500)
+            self.send_header("Content-Type", "application/json")
+            self.end_headers()
+            error_msg = json.dumps({"error": str(e)})
+            self.wfile.write(error_msg.encode("utf-8"))
+
+    def handle_health(self):
+        """Handle health check endpoint."""
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.end_headers()
+        self.wfile.write(json.dumps({"status": "ok"}).encode("utf-8"))
+
+    def log_message(self, format, *args):
+        """Override to use click.echo for logging."""
+        click.echo(f"{self.address_string()} - {format % args}")
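
Once the group options are set and `datapilot knowledge serve` is running, the handler above exposes two routes: /health, and /kb/<uuid>, which proxies GET {backend_url}/knowledge_bases/private/<uuid> with the Bearer token and X-Tenant header and relays the JSON response. A minimal sketch of a client call against the serve command's default port (assumes the server is already running locally):

# Query the local knowledge-base proxy started by `datapilot knowledge serve`.
import json
from urllib.request import urlopen

with urlopen("http://localhost:4000/health", timeout=5) as resp:
    print(json.loads(resp.read()))  # expected: {'status': 'ok'}

# A knowledge base would be fetched the same way via
# http://localhost:4000/kb/<uuid>, where <uuid> must be a full UUID.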
datapilot/core/mcp_utils/mcp.py
ADDED
@@ -0,0 +1,176 @@
+import asyncio
+import json
+import logging
+import shutil
+from dataclasses import dataclass
+
+import click
+import pyperclip
+from mcp import ClientSession
+from mcp import StdioServerParameters
+from mcp.client.stdio import stdio_client
+
+logging.basicConfig(level=logging.INFO)
+
+
+@dataclass
+class InputParameter:
+    name: str
+    type: str
+    required: bool
+    key: str
+    description: str
+
+
+def find_input_tokens(data):
+    tokens = set()
+    if isinstance(data, list):
+        for item in data:
+            tokens.update(find_input_tokens(item))
+    elif isinstance(data, dict):
+        for value in data.values():
+            tokens.update(find_input_tokens(value))
+    elif isinstance(data, str) and data.startswith("${input:"):
+        tokens.add(data[8:-1].strip())
+    return tokens
+
+
+# New mcp group
+@click.group()
+def mcp():
+    """mcp specific commands."""
+
+
+@mcp.command("inspect-mcp-server")
+def create_mcp_proxy():
+    content = click.edit()
+    if content is None:
+        click.echo("No input provided.")
+        return
+
+    try:
+        config = json.loads(content)
+    except json.JSONDecodeError:
+        click.echo("Invalid JSON content.")
+        return
+
+    inputs = {}
+    mcp_config = config.get("mcp", {})
+
+    # Select server
+    # Support both "servers" and "mcpServers" naming conventions
+    servers = mcp_config.get("mcpServers", mcp_config.get("servers", {}))
+    server_names = list(servers.keys())
+
+    if not server_names:
+        ctx = click.get_current_context()
+        click.secho("Error: No servers configured in mcp config (tried keys: 'mcpServers' and 'servers')", fg="red")
+        ctx.exit(1)
+
+    if len(server_names) > 1:
+        server_name = click.prompt("Choose a server", type=click.Choice(server_names), show_choices=True)
+    else:
+        server_name = server_names[0]
+
+    if server_name in servers:
+        server_config = servers[server_name]
+
+        # Collect input tokens ONLY from this server's config
+        input_ids = find_input_tokens(server_config.get("args", []))
+        input_ids.update(find_input_tokens(server_config.get("env", {})))
+
+        # Create prompt definitions using BOTH discovered tokens AND configured inputs
+        existing_input_ids = {i["id"] for i in mcp_config.get("inputs", [])}
+        inputs_to_prompt = input_ids.intersection(existing_input_ids)
+        inputs_to_prompt.update(input_ids)  # Add any undiscovered-by-config inputs
+
+        input_configs = []
+        for input_id in inputs_to_prompt:
+            input_def = next((d for d in mcp_config.get("inputs", []) if d["id"] == input_id), {})
+            inputs[input_id] = click.prompt(
+                input_def.get("description", input_id),
+                hide_input=True,
+            )
+            # Create InputParameters config entry
+            input_configs.append(
+                InputParameter(
+                    name=input_def.get("name", input_id),
+                    type="password",
+                    required=True,
+                    key=input_id,
+                    description=input_def.get("description", ""),
+                ).__dict__
+            )
+
+        # Replace input tokens in args
+        processed_args = [
+            inputs.get(arg[8:-1], arg) if isinstance(arg, str) and arg.startswith("${input:") else arg
+            for arg in server_config.get("args", [])
+        ]
+
+        # Replace input tokens in environment variables
+        processed_env = {
+            k: inputs.get(v[8:-1], v) if isinstance(v, str) and v.startswith("${input:") else v
+            for k, v in server_config.get("env", {}).items()
+        }
+
+        # Execute with processed parameters
+        output = asyncio.run(
+            list_tools(server_config=server_config, command=server_config["command"], args=processed_args, env=processed_env)
+        )
+        # Add processed parameters to output
+        output_with_name = {
+            "name": server_name,
+            "config": input_configs,
+            "command": server_config["command"],
+            "args": [arg.replace("${input:", "${") if isinstance(arg, str) else arg for arg in server_config.get("args", [])],
+            "env": [
+                {"key": k, "value": v.replace("${input:", "${") if isinstance(v, str) else v}
+                for k, v in server_config.get("env", {}).items()
+            ],
+            **output,
+        }
+        output_json = json.dumps(output_with_name, indent=2)
+        click.echo(output_json)
+        try:
+            pyperclip.copy(output_json)
+            click.secho("\nOutput copied to clipboard!", fg="green")
+        except pyperclip.PyperclipException as e:
+            click.secho(f"\nFailed to copy to clipboard: {e!s}", fg="yellow")
+
+
+async def list_tools(server_config: dict, command: str, args: list[str], env: dict[str, str]):
+    command_path = shutil.which(command)
+    if not command_path:
+        raise click.UsageError(f"Command not found: {command}")
+
+    try:
+        # Only support stdio server type
+        server_type = server_config.get("type", "stdio")
+        if server_type != "stdio":
+            raise click.UsageError(f"Only stdio MCP servers are supported. Found type: {server_type}")
+
+        server_params = StdioServerParameters(
+            command=command_path,
+            args=args,
+            env=env,
+        )
+
+        async with stdio_client(server_params) as (read, write):
+            async with ClientSession(read, write) as session:
+                await session.initialize()
+                tools = await session.list_tools()
+                mcp_tools = [
+                    {
+                        "name": tool.name,
+                        "description": tool.description,
+                        "inputSchema": tool.inputSchema,
+                    }
+                    for tool in tools.tools
+                ]
+
+                return {
+                    "tools": mcp_tools,
+                }
+    except Exception as e:
+        raise click.UsageError("Could not connect to MCP server: " + str(e)) from e
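
The inspect-mcp-server command reads an editor buffer containing a JSON document with a top-level "mcp" key holding either "mcpServers" or "servers" plus optional "inputs" definitions; every ${input:<id>} token found in the chosen server's args/env is prompted for before list_tools connects over stdio. A minimal sketch of that discovery step, assuming the 0.0.20 wheel is installed (the "github" server name, the package passed to npx, and the input id are placeholders):

# Find the ${input:...} ids that inspect-mcp-server would prompt for.
from datapilot.core.mcp_utils.mcp import find_input_tokens

sample_config = {
    "mcp": {
        "inputs": [{"id": "api_token", "description": "Server API token"}],
        "mcpServers": {
            "github": {
                "type": "stdio",
                "command": "npx",
                "args": ["-y", "some-mcp-server", "${input:api_token}"],  # placeholder package
                "env": {"API_TOKEN": "${input:api_token}"},
            }
        },
    }
}

server = sample_config["mcp"]["mcpServers"]["github"]
tokens = find_input_tokens(server["args"]) | find_input_tokens(server["env"])
print(tokens)  # {'api_token'} — prompted with hide_input=True before the server is launched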
datapilot/core/platforms/dbt/cli/cli.py
CHANGED
@@ -24,13 +24,14 @@ logging.basicConfig(level=logging.INFO)
 
 # New dbt group
 @click.group()
-def dbt():
+@click.pass_context
+def dbt(ctx):
     """DBT specific commands."""
+    # Ensure context object exists
+    ctx.ensure_object(dict)
 
 
 @dbt.command("project-health")
-@click.option("--token", required=False, help="Your API token for authentication.")
-@click.option("--instance-name", required=False, help="Your tenant ID.")
 @click.option(
     "--manifest-path",
     required=True,
@@ -57,21 +58,24 @@ def dbt():
     default=None,
     help="Selective model testing. Specify one or more models to run tests on.",
 )
-@click.
+@click.pass_context
 def project_health(
-    token,
-    instance_name,
+    ctx,
     manifest_path,
     catalog_path,
     config_path=None,
     config_name=None,
     select=None,
-    backend_url="https://api.myaltimate.com",
 ):
     """
     Validate the DBT project's configuration and structure.
     :param manifest_path: Path to the DBT manifest file.
     """
+    # Get common options from parent context
+    token = ctx.parent.obj.get("token")
+    instance_name = ctx.parent.obj.get("instance_name")
+    backend_url = ctx.parent.obj.get("backend_url")
+
     config = None
     if config_path:
         config = load_config(config_path)
@@ -131,25 +135,32 @@ def project_health(
 
 
 @dbt.command("onboard")
-@click.option("--token", prompt="API Token", help="Your API token for authentication.")
-@click.option("--instance-name", prompt="Instance Name", help="Your tenant ID.")
 @click.option("--dbt_core_integration_id", prompt="DBT Core Integration ID", help="DBT Core Integration ID")
 @click.option(
     "--dbt_core_integration_environment", default="PROD", prompt="DBT Core Integration Environment", help="DBT Core Integration Environment"
 )
 @click.option("--manifest-path", required=True, prompt="Manifest Path", help="Path to the manifest file.")
 @click.option("--catalog-path", required=False, prompt=False, help="Path to the catalog file.")
-@click.
+@click.pass_context
 def onboard(
-    token,
-    instance_name,
+    ctx,
     dbt_core_integration_id,
     dbt_core_integration_environment,
     manifest_path,
     catalog_path,
-    backend_url="https://api.myaltimate.com",
 ):
     """Onboard a manifest file to DBT."""
+    # Get common options from parent context
+    token = ctx.parent.obj.get("token")
+    instance_name = ctx.parent.obj.get("instance_name")
+    backend_url = ctx.parent.obj.get("backend_url")
+
+    # For onboard command, token and instance_name are required
+    if not token:
+        token = click.prompt("API Token")
+    if not instance_name:
+        instance_name = click.prompt("Instance Name")
+
     check_token_and_instance(token, instance_name)
 
     if not validate_credentials(token, backend_url, instance_name):
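
With these changes, --token, --instance-name and --backend-url are accepted only on the top-level datapilot group; dbt project-health and dbt onboard read them from ctx.parent.obj (onboard still prompts if they are missing). A minimal sketch of an invocation through click's test runner, assuming the 0.0.20 wheel is installed:

# Global options go on the group; the subcommand no longer declares --token itself.
from click.testing import CliRunner

from datapilot.cli.main import datapilot

runner = CliRunner()
result = runner.invoke(
    datapilot,
    ["--token", "dummy-token", "--instance-name", "acme", "dbt", "project-health", "--help"],
)
print(result.output)  # project-health help text, now without --token/--instance-name options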
{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/AUTHORS.rst
RENAMED
File without changes

{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/LICENSE
RENAMED
File without changes

{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/WHEEL
RENAMED
File without changes

{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/entry_points.txt
RENAMED
File without changes

{altimate_datapilot_cli-0.0.18.dist-info → altimate_datapilot_cli-0.0.20.dist-info}/top_level.txt
RENAMED
File without changes