deepset-mcp 0.0.2rc2__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepset_mcp/api/pipeline_template/models.py +29 -2
- deepset_mcp/config.py +25 -0
- deepset_mcp/main.py +78 -27
- deepset_mcp/tool_factory.py +52 -34
- deepset_mcp/tools/doc_search.py +0 -25
- deepset_mcp/tools/pipeline_template.py +23 -14
- deepset_mcp/tools/secrets.py +60 -7
- {deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/METADATA +2 -1
- {deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/RECORD +11 -10
- {deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/WHEEL +0 -0
- {deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/entry_points.txt +0 -0
deepset_mcp/api/pipeline_template/models.py
CHANGED

@@ -1,7 +1,8 @@
 from enum import StrEnum
+from typing import Any
 from uuid import UUID
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, model_validator
 
 
 class PipelineType(StrEnum):
@@ -28,10 +29,36 @@ class PipelineTemplate(BaseModel):
     display_name: str = Field(alias="name")
     pipeline_template_id: UUID = Field(alias="pipeline_template_id")
     potential_applications: list[str] = Field(alias="potential_applications")
-    yaml_config: str | None =
+    yaml_config: str | None = None
     tags: list[PipelineTemplateTag]
     pipeline_type: PipelineType
 
+    @model_validator(mode="before")
+    @classmethod
+    def populate_yaml_config(cls, values: Any) -> Any:
+        """Populate yaml_config from query_yaml or indexing_yaml based on pipeline_type."""
+        if not isinstance(values, dict):
+            return values
+
+        # Skip if yaml_config is already set
+        if values.get("yaml_config") is not None:
+            return values
+
+        # Get pipeline_type from the model data
+        pipeline_type = values.get("pipeline_type")
+
+        if pipeline_type == PipelineType.INDEXING or pipeline_type == "indexing":
+            yaml_config = values.get("indexing_yaml")
+        elif pipeline_type == PipelineType.QUERY or pipeline_type == "query":
+            yaml_config = values.get("query_yaml")
+        else:
+            yaml_config = None
+
+        if yaml_config is not None:
+            values["yaml_config"] = yaml_config
+
+        return values
+
 
 class PipelineTemplateList(BaseModel):
     """Response model for listing pipeline templates."""
deepset_mcp/config.py
ADDED

@@ -0,0 +1,25 @@
+"""This module contains static configuration for the deepset MCP server."""
+
+# We need this mapping to which environment variables integrations are mapped to
+# The mapping is maintained in the pipeline operator:
+# https://github.com/deepset-ai/dc-pipeline-operator/blob/main/dc_operators/config.py#L279
+TOKEN_DOMAIN_MAPPING = {
+    "huggingface.co": ["HF_API_TOKEN", "HF_TOKEN"],
+    "api.openai.com": ["OPENAI_API_KEY"],
+    "bedrock.amazonaws.com": ["BEDROCK"],
+    "api.cohere.ai": ["COHERE_API_KEY"],
+    "openai.azure.com": ["AZURE_OPENAI_API_KEY"],
+    "cognitive-services.azure.com": ["AZURE_AI_API_KEY"],
+    "unstructured.io": ["UNSTRUCTURED_API_KEY"],
+    "api.deepl.com": ["DEEPL_API_KEY"],
+    "generativelanguage.googleapis.com": ["GOOGLE_API_KEY"],
+    "api.nvidia.com": ["NVIDIA_API_KEY"],
+    "api.voyageai.com": ["VOYAGE_API_KEY"],
+    "searchapi.io": ["SEARCHAPI_API_KEY"],
+    "snowflakecomputing.com": ["SNOWFLAKE_API_KEY"],
+    "wandb.ai": ["WANDB_API_KEY"],
+    "mongodb.com": ["MONGO_CONNECTION_STRING"],
+    "together.ai": ["TOGETHERAI_API_KEY"],
+}
+
+DEEPSET_DOCS_DEFAULT_SHARE_URL = "https://cloud.deepset.ai/shared_prototypes?share_token=prototype_eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3ODM0MjE0OTguNTk5LCJhdWQiOiJleHRlcm5hbCB1c2VyIiwiaXNzIjoiZEMiLCJ3b3Jrc3BhY2VfaWQiOiI4YzI0ZjExMi1iMjljLTQ5MWMtOTkzOS1hZTkxMDRhNTQyMWMiLCJ3b3Jrc3BhY2VfbmFtZSI6ImRjLWRvY3MtY29udGVudCIsIm9yZ2FuaXphdGlvbl9pZCI6ImNhOWYxNGQ0LWMyYzktNDYwZC04ZDI2LWY4Y2IwYWNhMDI0ZiIsInNoYXJlX2lkIjoiY2Y3MTA3ODAtOThmNi00MzlmLThiNzYtMmMwNDkyODNiMDZhIiwibG9naW5fcmVxdWlyZWQiOmZhbHNlfQ.5j6DCNRQ1_KB8lhIJqHyw2hBIleEW1_Y_UBuH6MTYY0"
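
TOKEN_DOMAIN_MAPPING translates an integration's provider domain into the environment-variable names that pipeline YAML can reference; the reworked secrets tools below rely on it. A quick lookup, assuming deepset-mcp 0.0.3 is installed:

from deepset_mcp.config import TOKEN_DOMAIN_MAPPING

# Env var names that an integration for this provider domain is exposed under.
print(TOKEN_DOMAIN_MAPPING.get("huggingface.co", []))  # -> ['HF_API_TOKEN', 'HF_TOKEN']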
deepset_mcp/main.py
CHANGED

@@ -1,10 +1,15 @@
 import argparse
+import asyncio
 import logging
 import os
 from pathlib import Path
+from urllib.parse import parse_qs, urlparse
 
+import jwt
 from mcp.server.fastmcp import FastMCP
 
+from deepset_mcp.api.client import AsyncDeepsetClient
+from deepset_mcp.config import DEEPSET_DOCS_DEFAULT_SHARE_URL
 from deepset_mcp.tool_factory import WorkspaceMode, register_tools
 
 # Initialize MCP Server
@@ -34,6 +39,58 @@ async def deepset_recommended_prompt() -> str:
     return prompt_path.read_text()
 
 
+async def fetch_shared_prototype_details(share_url: str) -> tuple[str, str, str]:
+    """Gets the pipeline name, workspace name and an API token for a shared prototype url.
+
+    :param share_url: The URL of a shared prototype on the deepset platform.
+
+    :returns: A tuple containing the pipeline name, workspace name and an API token.
+    """
+    parsed_url = urlparse(share_url)
+    query_params = parse_qs(parsed_url.query)
+    share_token = query_params.get("share_token", [None])[0]
+    if not share_token:
+        raise ValueError("Invalid share URL: missing share_token parameter.")
+
+    jwt_token = share_token.replace("prototype_", "")
+
+    decoded_token = jwt.decode(jwt_token, options={"verify_signature": False})
+    workspace_name = decoded_token.get("workspace_name")
+    if not workspace_name:
+        raise ValueError("Invalid JWT in share_token: missing 'workspace_name'.")
+
+    share_id = decoded_token.get("share_id")
+    if not share_id:
+        raise ValueError("Invalid JWT in share_token: missing 'share_id'.")
+
+    # For shared prototypes, we need to:
+    # 1. Fetch prototype details (pipeline name) using the information encoded in the JWT
+    # 2. Create a shared prototype user
+    async with AsyncDeepsetClient(api_key=share_token) as client:
+        response = await client.request(f"/v1/workspaces/{workspace_name}/shared_prototypes/{share_id}")
+        if not response.success:
+            raise ValueError(f"Failed to fetch shared prototype details: {response.status_code} {response.json}")
+
+        data = response.json or {}
+        pipeline_names: list[str] = data.get("pipeline_names", [])
+        if not pipeline_names:
+            raise ValueError("No pipeline names found in shared prototype response.")
+
+        user_info = await client.request("/v1/workspaces/dc-docs-content/shared_prototype_users", method="POST")
+
+        if not user_info.success:
+            raise ValueError("Failed to fetch user information from shared prototype response.")
+
+        user_data = user_info.json or {}
+
+        try:
+            api_key = user_data["user_token"]
+        except KeyError:
+            raise ValueError("No user token in shared prototype response.") from None
+
+    return workspace_name, pipeline_names[0], api_key
+
+
 def main() -> None:
     """Entrypoint for the deepset MCP server."""
     parser = argparse.ArgumentParser(description="Run the Deepset MCP server.")
@@ -48,22 +105,18 @@ def main() -> None:
         help="Deepset API key (env DEEPSET_API_KEY)",
     )
     parser.add_argument(
-        "--docs-
-
-
-    parser.add_argument(
-        "--docs-pipeline-name",
-        help="Deepset docs pipeline name (env DEEPSET_DOCS_PIPELINE_NAME)",
-    )
-    parser.add_argument(
-        "--docs-api-key",
-        help="Deepset docs pipeline API key (env DEEPSET_DOCS_API_KEY)",
+        "--docs-share-url",
+        default=DEEPSET_DOCS_DEFAULT_SHARE_URL,
+        help="Deepset docs search share URL (env DEEPSET_DOCS_SHARE_URL)",
     )
     parser.add_argument(
         "--workspace-mode",
-        choices=[
-        default=
-        help=
+        choices=[WorkspaceMode.STATIC, WorkspaceMode.DYNAMIC],
+        default=WorkspaceMode.STATIC,
+        help=(
+            "Whether workspace should be set statically or dynamically provided during a tool call. "
+            f"Default: '{WorkspaceMode.STATIC}'"
+        ),
     )
     parser.add_argument(
         "--tools",
@@ -89,17 +142,23 @@ def main() -> None:
     # prefer flags, fallback to env
     workspace = args.workspace or os.getenv("DEEPSET_WORKSPACE")
     api_key = args.api_key or os.getenv("DEEPSET_API_KEY")
-
-
-
+    docs_share_url = args.docs_share_url or os.getenv("DEEPSET_DOCS_SHARE_URL")
+
+    if docs_share_url:
+        try:
+            workspace_name, pipeline_name, api_key_docs = asyncio.run(fetch_shared_prototype_details(docs_share_url))
+            os.environ["DEEPSET_DOCS_WORKSPACE"] = workspace_name
+            os.environ["DEEPSET_DOCS_PIPELINE_NAME"] = pipeline_name
+            os.environ["DEEPSET_DOCS_API_KEY"] = api_key_docs
+        except (ValueError, jwt.DecodeError) as e:
+            parser.error(f"Error processing --docs-share-url: {e}")
 
     # Create server configuration
     workspace_mode = WorkspaceMode(args.workspace_mode)
 
-
-    if workspace_mode == WorkspaceMode.IMPLICIT:
+    if workspace_mode == WorkspaceMode.STATIC:
         if not workspace:
-            parser.error("Missing workspace: set --workspace or DEEPSET_WORKSPACE
+            parser.error("Missing workspace: set --workspace or DEEPSET_WORKSPACE")
 
         if not api_key:
             parser.error("Missing API key: set --api-key or DEEPSET_API_KEY")
@@ -109,14 +168,6 @@ def main() -> None:
     os.environ["DEEPSET_WORKSPACE"] = workspace
     os.environ["DEEPSET_API_KEY"] = api_key
 
-    # Set docs environment variables if provided
-    if docs_workspace:
-        os.environ["DEEPSET_DOCS_WORKSPACE"] = docs_workspace
-    if docs_pipeline_name:
-        os.environ["DEEPSET_DOCS_PIPELINE_NAME"] = docs_pipeline_name
-    if docs_api_key:
-        os.environ["DEEPSET_DOCS_API_KEY"] = docs_api_key
-
     # Parse tool names if provided
     tool_names = None
     if args.tools:
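
fetch_shared_prototype_details works because the share_token query parameter is a "prototype_"-prefixed JWT whose payload already contains workspace_name and share_id. A sketch of just the token-parsing step (no API calls), using the same unverified decode as the diff and the default share URL shipped in deepset_mcp.config; assumes deepset-mcp 0.0.3 and pyjwt are installed:

from urllib.parse import parse_qs, urlparse

import jwt  # pyjwt, now declared as a dependency (pyjwt[crypto])

from deepset_mcp.config import DEEPSET_DOCS_DEFAULT_SHARE_URL

# Pull the share_token query parameter out of the share URL.
share_token = parse_qs(urlparse(DEEPSET_DOCS_DEFAULT_SHARE_URL).query)["share_token"][0]

# Strip the "prototype_" prefix and read the JWT payload without verifying the signature.
payload = jwt.decode(share_token.removeprefix("prototype_"), options={"verify_signature": False})
print(payload["workspace_name"], payload["share_id"])

The actual function then uses the token as an API key to fetch the prototype's pipeline names and to create a shared-prototype user whose user_token becomes the docs search API key.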
deepset_mcp/tool_factory.py
CHANGED

@@ -19,7 +19,6 @@ from deepset_mcp.tools.custom_components import (
     list_custom_component_installations as list_custom_component_installations_tool,
 )
 from deepset_mcp.tools.doc_search import (
-    get_docs_config,
     search_docs as search_docs_tool,
 )
 from deepset_mcp.tools.haystack_service import (
@@ -48,9 +47,9 @@ from deepset_mcp.tools.pipeline import (
     validate_pipeline as validate_pipeline_tool,
 )
 from deepset_mcp.tools.pipeline_template import (
-
-
-
+    get_template as get_pipeline_template_tool,
+    list_templates as list_pipeline_templates_tool,
+    search_templates as search_pipeline_templates_tool,
 )
 from deepset_mcp.tools.secrets import (
     get_secret as get_secret_tool,
@@ -63,6 +62,16 @@ from deepset_mcp.tools.workspace import (
     list_workspaces as list_workspaces_tool,
 )
 
+
+def are_docs_available() -> bool:
+    """Checks if documentation search is available."""
+    return bool(
+        os.environ.get("DEEPSET_DOCS_WORKSPACE", False)
+        and os.environ.get("DEEPSET_DOCS_PIPELINE_NAME", False)
+        and os.environ.get("DEEPSET_DOCS_API_KEY", False)
+    )
+
+
 EXPLORER = RichExplorer(store=STORE)
 
 
@@ -108,16 +117,11 @@ async def search_docs(query: str) -> str:
     :param query: The search query to execute against the documentation.
     :returns: The formatted search results from the documentation.
     """
-
-    if not docs_config:
-        raise RuntimeError("Documentation search configuration not available")
-
-    docs_workspace, docs_pipeline_name, docs_api_key = docs_config
-    async with AsyncDeepsetClient(api_key=docs_api_key) as client:
+    async with AsyncDeepsetClient(api_key=os.environ["DEEPSET_DOCS_API_KEY"]) as client:
         response = await search_docs_tool(
             client=client,
-            workspace=
-            pipeline_name=
+            workspace=os.environ["DEEPSET_DOCS_WORKSPACE"],
+            pipeline_name=os.environ["DEEPSET_DOCS_PIPELINE_NAME"],
             query=query,
         )
     return response
@@ -126,8 +130,8 @@ async def search_docs(query: str) -> str:
 class WorkspaceMode(StrEnum):
     """Configuration for how workspace is provided to tools."""
 
-
-
+    STATIC = "static"  # workspace from env, no parameter in tool signature
+    DYNAMIC = "dynamic"  # workspace as required parameter in tool signature
 
 
 class MemoryType(StrEnum):
@@ -165,11 +169,21 @@ TOOL_REGISTRY: dict[str, tuple[Callable[..., Any], ToolConfig]] = {
     ),
     "create_pipeline": (
         create_pipeline_tool,
-        ToolConfig(
+        ToolConfig(
+            needs_client=True,
+            needs_workspace=True,
+            memory_type=MemoryType.BOTH,
+            custom_args={"skip_validation_errors": True},
+        ),
     ),
     "update_pipeline": (
         update_pipeline_tool,
-        ToolConfig(
+        ToolConfig(
+            needs_client=True,
+            needs_workspace=True,
+            memory_type=MemoryType.BOTH,
+            custom_args={"skip_validation_errors": True},
+        ),
     ),
     "get_pipeline": (
         get_pipeline_tool,
@@ -216,15 +230,20 @@ TOOL_REGISTRY: dict[str, tuple[Callable[..., Any], ToolConfig]] = {
         deploy_index_tool,
         ToolConfig(needs_client=True, needs_workspace=True, memory_type=MemoryType.EXPLORABLE),
     ),
-    "
+    "list_templates": (
         list_pipeline_templates_tool,
-        ToolConfig(
+        ToolConfig(
+            needs_client=True,
+            needs_workspace=True,
+            memory_type=MemoryType.EXPLORABLE,
+            custom_args={"field": "created_at", "order": "DESC", "limit": 100},
+        ),
     ),
-    "
+    "get_template": (
         get_pipeline_template_tool,
         ToolConfig(needs_client=True, needs_workspace=True, memory_type=MemoryType.EXPLORABLE),
     ),
-    "
+    "search_templates": (
         search_pipeline_templates_tool,
         ToolConfig(
             needs_client=True,
@@ -290,8 +309,8 @@ def create_enhanced_tool(
     Args:
         base_func: The base tool function.
         config: Tool configuration specifying dependencies and custom arguments.
-        workspace_mode: How the workspace should be handled
-        workspace: The workspace to use
+        workspace_mode: How the workspace should be handled.
+        workspace: The workspace to use when using a static workspace.
 
     Returns:
         An enhanced, awaitable tool function with an updated signature and docstring.
@@ -333,7 +352,7 @@ def create_enhanced_tool(
     params_to_remove.update(config.custom_args.keys())
     if config.needs_client:
         params_to_remove.add("client")
-    if config.needs_workspace and workspace_mode == WorkspaceMode.
+    if config.needs_workspace and workspace_mode == WorkspaceMode.STATIC:
         params_to_remove.add("workspace")
 
     # Create the new signature from the original function
@@ -350,15 +369,15 @@ def create_enhanced_tool(
     # Create the final wrapper function that handles client/workspace injection
     if config.needs_client:
         if config.needs_workspace:
-            if workspace_mode == WorkspaceMode.
+            if workspace_mode == WorkspaceMode.STATIC:
 
-                async def
+                async def workspace_environment_wrapper(**kwargs: Any) -> Any:
                     ws = workspace or get_workspace_from_env()
                     async with AsyncDeepsetClient() as client:
                         return await decorated_func(client=client, workspace=ws, **kwargs)
 
-                wrapper =
-                else:  #
+                wrapper = workspace_environment_wrapper
+            else:  # DYNAMIC mode
 
                 async def workspace_explicit_wrapper(**kwargs: Any) -> Any:
                     async with AsyncDeepsetClient() as client:
@@ -400,7 +419,7 @@ def create_enhanced_tool(
     params_to_remove_from_doc = set()
     if config.needs_client:
         params_to_remove_from_doc.add("client")
-    if config.needs_workspace and workspace_mode == WorkspaceMode.
+    if config.needs_workspace and workspace_mode == WorkspaceMode.STATIC:
         params_to_remove_from_doc.add("workspace")
     if config.custom_args:
         params_to_remove_from_doc.update(config.custom_args.keys())
@@ -428,11 +447,11 @@ def register_tools(
     Args:
         mcp: FastMCP server instance
        workspace_mode: How workspace should be handled
-        workspace: Workspace to use for
+        workspace: Workspace to use for environment mode (if None, reads from env)
        tool_names: Set of tool names to register (if None, registers all tools)
    """
    # Check if docs search is available
-    docs_available =
+    docs_available = are_docs_available()
 
    # Validate tool names if provided
    if tool_names is not None:
@@ -446,8 +465,8 @@
    # Warn if search_docs was requested but config is missing
    if "search_docs" in tool_names and not docs_available:
        logging.warning(
-            "Documentation search tool requested but not available. To enable, set the
-            "
+            "Documentation search tool requested but not available. To enable, set the DEEPSET_DOCS_SHARE_URL "
+            "environment variable."
        )
 
    tools_to_register = tool_names.copy()
@@ -457,8 +476,7 @@
    # Warn if search_docs would be skipped in "all tools" mode
    if not docs_available:
        logging.warning(
-            "Documentation search tool not enabled. To enable, set the
-            "variables: DEEPSET_DOCS_WORKSPACE, DEEPSET_DOCS_PIPELINE_NAME, DEEPSET_DOCS_API_KEY"
+            "Documentation search tool not enabled. To enable, set the DEEPSET_DOCS_SHARE_URL environment variable."
        )
 
    # Remove search_docs if config is not available
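
The renamed STATIC/DYNAMIC modes decide whether workspace stays in the signature of the generated tool. The following is a simplified, hypothetical sketch of that signature-rewriting idea, not the package's create_enhanced_tool: in static mode the wrapper injects the workspace and removes the parameter from what the MCP client sees.

import inspect
from functools import wraps
from typing import Any, Callable


def bind_workspace(func: Callable[..., Any], workspace: str) -> Callable[..., Any]:
    """Sketch of static mode: inject the workspace and drop it from the exposed signature."""

    @wraps(func)
    def wrapper(**kwargs: Any) -> Any:
        return func(workspace=workspace, **kwargs)

    sig = inspect.signature(func)
    params = [p for name, p in sig.parameters.items() if name != "workspace"]
    wrapper.__signature__ = sig.replace(parameters=params)
    return wrapper


def search_pipelines(*, workspace: str, query: str) -> str:
    return f"searching '{query}' in workspace '{workspace}'"


static_tool = bind_workspace(search_pipelines, "my-workspace")
print(inspect.signature(static_tool))  # (*, query: str) -> str
print(static_tool(query="rag"))

In dynamic mode the factory instead keeps workspace as a required parameter, so each tool call names the workspace explicitly.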
deepset_mcp/tools/doc_search.py
CHANGED

@@ -1,5 +1,3 @@
-import os
-
 from deepset_mcp.api.exceptions import BadRequestError, ResourceNotFoundError, UnexpectedAPIError
 from deepset_mcp.api.pipeline.models import DeepsetSearchResponse
 from deepset_mcp.api.protocols import AsyncClientProtocol
@@ -46,14 +44,6 @@ async def search_docs(*, client: AsyncClientProtocol, workspace: str, pipeline_n
     :returns: A string containing the formatted search results or error message.
     """
     try:
-        # First, check if the pipeline exists and get its status
-        pipeline = await client.pipelines(workspace=workspace).get(pipeline_name=pipeline_name)
-
-        # Check if pipeline is deployed
-        if pipeline.status != "DEPLOYED":
-            return f"Documentation pipeline '{pipeline_name}' is not deployed (current status: {pipeline.status})."
-
-        # Execute the search
         search_response = await client.pipelines(workspace=workspace).search(pipeline_name=pipeline_name, query=query)
 
         return doc_search_results_to_llm_readable_string(results=search_response)
@@ -66,18 +56,3 @@ async def search_docs(*, client: AsyncClientProtocol, workspace: str, pipeline_n
         return f"Failed to search documentation using pipeline '{pipeline_name}': {e}"
     except Exception as e:
         return f"An unexpected error occurred while searching documentation with pipeline '{pipeline_name}': {str(e)}"
-
-
-def get_docs_config() -> tuple[str, str, str] | None:
-    """Get docs search configuration from environment variables.
-
-    :returns: Tuple of (workspace, pipeline_name, api_key) if all are available, None otherwise.
-    """
-    workspace = os.environ.get("DEEPSET_DOCS_WORKSPACE")
-    pipeline_name = os.environ.get("DEEPSET_DOCS_PIPELINE_NAME")
-    api_key = os.environ.get("DEEPSET_DOCS_API_KEY")
-
-    if workspace and pipeline_name and api_key:
-        return workspace, pipeline_name, api_key
-
-    return None
deepset_mcp/tools/pipeline_template.py
CHANGED

@@ -6,34 +6,38 @@ from deepset_mcp.api.pipeline_template.models import (
     PipelineTemplateList,
     PipelineTemplateSearchResult,
     PipelineTemplateSearchResults,
+    PipelineType,
 )
 from deepset_mcp.api.protocols import AsyncClientProtocol
 from deepset_mcp.tools.model_protocol import ModelProtocol
 
 
-async def
+async def list_templates(
     *,
     client: AsyncClientProtocol,
     workspace: str,
     limit: int = 100,
     field: str = "created_at",
     order: str = "DESC",
-
+    pipeline_type: PipelineType | str | None = None,
 ) -> PipelineTemplateList | str:
-    """Retrieves a list of all available pipeline templates.
+    """Retrieves a list of all available pipeline and indexing templates.
 
     :param client: The async client for API requests.
     :param workspace: The workspace to list templates from.
     :param limit: Maximum number of templates to return (default: 100).
     :param field: Field to sort by (default: "created_at").
     :param order: Sort order, either "ASC" or "DESC" (default: "DESC").
-    :param
+    :param pipeline_type: The type of pipeline to return.
 
     :returns: List of pipeline templates or error message.
     """
     try:
         return await client.pipeline_templates(workspace=workspace).list_templates(
-            limit=limit,
+            limit=limit,
+            field=field,
+            order=order,
+            filter=f"pipeline_type eq '{pipeline_type}'" if pipeline_type else None,
         )
     except ResourceNotFoundError:
         return f"There is no workspace named '{workspace}'. Did you mean to configure it?"
@@ -41,16 +45,14 @@ async def list_pipeline_templates(
         return f"Failed to list pipeline templates: {e}"
 
 
-async def
-
-) -> PipelineTemplate | str:
-    """Fetches detailed information for a specific pipeline template, identified by its `template_name`.
+async def get_template(*, client: AsyncClientProtocol, workspace: str, template_name: str) -> PipelineTemplate | str:
+    """Fetches detailed information for a specific pipeline or indexing template, identified by its `template_name`.
 
     :param client: The async client for API requests.
     :param workspace: The workspace to fetch template from.
     :param template_name: The name of the template to fetch.
 
-    :returns: Pipeline template details or error message.
+    :returns: Pipeline or indexing template details or error message.
     """
     try:
         return await client.pipeline_templates(workspace=workspace).get_template(template_name=template_name)
@@ -60,22 +62,29 @@ async def get_pipeline_template(
         return f"Failed to fetch pipeline template '{template_name}': {e}"
 
 
-async def
-    *,
+async def search_templates(
+    *,
+    client: AsyncClientProtocol,
+    query: str,
+    model: ModelProtocol,
+    workspace: str,
+    top_k: int = 10,
+    pipeline_type: PipelineType | str = PipelineType.QUERY,
 ) -> PipelineTemplateSearchResults | str:
-    """Searches for pipeline templates based on name or description using semantic similarity.
+    """Searches for pipeline or indexing templates based on name or description using semantic similarity.
 
     :param client: The API client to use.
     :param query: The search query.
     :param model: The model to use for computing embeddings.
     :param workspace: The workspace to search templates from.
     :param top_k: Maximum number of results to return (default: 10).
+    :param pipeline_type: The type of pipeline to return ('indexing' or 'query'; default: 'query').
 
     :returns: Search results with similarity scores or error message.
     """
     try:
         response = await client.pipeline_templates(workspace=workspace).list_templates(
-            filter="pipeline_type eq '
+            filter=f"pipeline_type eq '{pipeline_type}'"
         )
     except UnexpectedAPIError as e:
         return f"Failed to retrieve pipeline templates: {e}"
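
Both list_templates and search_templates now narrow results with a filter built from the pipeline type. Because PipelineType is a StrEnum, the enum member and the plain string render identically inside the f-string; a small sketch with the values assumed from the diff ('query' and 'indexing'), using a stand-in enum rather than the package's own:

from enum import StrEnum


class PipelineType(StrEnum):
    # Stand-in for deepset_mcp.api.pipeline_template.models.PipelineType.
    QUERY = "query"
    INDEXING = "indexing"


for pipeline_type in (PipelineType.QUERY, "indexing"):
    print(f"pipeline_type eq '{pipeline_type}'")
# pipeline_type eq 'query'
# pipeline_type eq 'indexing'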
deepset_mcp/tools/secrets.py
CHANGED

@@ -1,9 +1,27 @@
+from pydantic import BaseModel
+
 from deepset_mcp.api.exceptions import ResourceNotFoundError, UnexpectedAPIError
 from deepset_mcp.api.protocols import AsyncClientProtocol
-from deepset_mcp.
+from deepset_mcp.config import TOKEN_DOMAIN_MAPPING
+
+
+class EnvironmentSecret(BaseModel):
+    """Model representing a secret or an integration."""
+
+    name: str
+    id: str
+    invalid: bool | None = None
+
+
+class EnvironmentSecretList(BaseModel):
+    """Model representing a list of secrets and integrations."""
 
+    data: list[EnvironmentSecret]
+    has_more: bool
+    total: int
 
-
+
+async def list_secrets(*, client: AsyncClientProtocol, limit: int = 10) -> EnvironmentSecretList | str:
     """Lists all secrets available in the user's deepset organization.
 
     Use this tool to retrieve a list of secrets with their names and IDs.
@@ -15,7 +33,26 @@ async def list_secrets(*, client: AsyncClientProtocol, limit: int = 10) -> Secre
     :returns: List of secrets or error message
     """
     try:
-
+        secrets_list = await client.secrets().list(limit=limit)
+        integrations_list = await client.integrations().list()
+
+        env_secrets = [EnvironmentSecret(name=secret.name, id=secret.secret_id) for secret in secrets_list.data]
+        for integration in integrations_list.integrations:
+            env_vars = TOKEN_DOMAIN_MAPPING.get(integration.provider_domain, [])
+            for env_var in env_vars:
+                env_secrets.append(
+                    EnvironmentSecret(
+                        name=env_var,
+                        id=str(integration.model_registry_token_id),
+                        invalid=integration.invalid,
+                    )
+                )
+
+        return EnvironmentSecretList(
+            data=env_secrets,
+            has_more=secrets_list.has_more,
+            total=len(env_secrets),
+        )
     except ResourceNotFoundError as e:
         return f"Error: {str(e)}"
     except UnexpectedAPIError as e:
@@ -24,7 +61,7 @@ async def list_secrets(*, client: AsyncClientProtocol, limit: int = 10) -> Secre
         return f"Unexpected error: {str(e)}"
 
 
-async def get_secret(*, client: AsyncClientProtocol, secret_id: str) ->
+async def get_secret(*, client: AsyncClientProtocol, secret_id: str) -> EnvironmentSecret | str:
     """Retrieves detailed information about a specific secret by its ID.
 
     Use this tool to get information about a specific secret when you know its ID.
@@ -36,9 +73,25 @@ async def get_secret(*, client: AsyncClientProtocol, secret_id: str) -> Secret |
     :returns: Secret information or error message
     """
     try:
-
-
-
+        secret = await client.secrets().get(secret_id=secret_id)
+        return EnvironmentSecret(name=secret.name, id=secret.secret_id)
+    except ResourceNotFoundError:
+        try:
+            integrations_list = await client.integrations().list()
+            for integration in integrations_list.integrations:
+                if str(integration.model_registry_token_id) == secret_id:
+                    env_vars = TOKEN_DOMAIN_MAPPING.get(integration.provider_domain, [])
+                    if env_vars:
+                        return EnvironmentSecret(
+                            name=env_vars[0],
+                            id=str(integration.model_registry_token_id),
+                            invalid=integration.invalid,
+                        )
+            return f"Error: Secret with ID '{secret_id}' not found."
+        except UnexpectedAPIError as e:
+            return f"API Error: {str(e)}"
+        except Exception as e:
+            return f"Unexpected error: {str(e)}"
     except UnexpectedAPIError as e:
         return f"API Error: {str(e)}"
     except Exception as e:
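
list_secrets now returns one combined EnvironmentSecretList: regular secrets keep their own names, while each integration is expanded into the environment-variable names from TOKEN_DOMAIN_MAPPING. A sketch of that expansion step with stand-in integration records (the real objects come from client.integrations().list(); assumes deepset-mcp 0.0.3 is installed):

from deepset_mcp.config import TOKEN_DOMAIN_MAPPING

# Stand-in integration records for illustration only.
integrations = [
    {"provider_domain": "huggingface.co", "model_registry_token_id": "123", "invalid": False},
    {"provider_domain": "api.openai.com", "model_registry_token_id": "456", "invalid": False},
]

env_secrets = []
for integration in integrations:
    # One integration can map to several env var names (e.g. HF_API_TOKEN and HF_TOKEN).
    for env_var in TOKEN_DOMAIN_MAPPING.get(integration["provider_domain"], []):
        env_secrets.append(
            {"name": env_var, "id": integration["model_registry_token_id"], "invalid": integration["invalid"]}
        )

print([secret["name"] for secret in env_secrets])
# -> ['HF_API_TOKEN', 'HF_TOKEN', 'OPENAI_API_KEY']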
{deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: deepset-mcp
-Version: 0.0.2rc2
+Version: 0.0.3
 Summary: Collection of MCP tools and Agents to work with the deepset AI platform. Create, debug or learn about pipelines on the platform. Useable from the CLI, Cursor, Claude Code, or other MCP clients.
 Project-URL: Homepage, https://deepset.ai
 Author-email: Mathis Lucka <mathis.lucka@deepset.ai>, Tanay Soni <tanay.soni@deepset.ai>
@@ -22,6 +22,7 @@ Requires-Dist: mcp>=1.10.1
 Requires-Dist: model2vec
 Requires-Dist: numpy
 Requires-Dist: pydantic>=2.0.0
+Requires-Dist: pyjwt[crypto]
 Requires-Dist: pyyaml
 Requires-Dist: rich
 Provides-Extra: analysis
{deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/RECORD
CHANGED

@@ -1,8 +1,9 @@
 deepset_mcp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+deepset_mcp/config.py,sha256=OkWTP04u57_t15ohiGcYbkx0DwKxYrpCDZjB-CcSDIY,1622
 deepset_mcp/initialize_embedding_model.py,sha256=dgItYfml2LvEk_uxZJSga0aEstFUxTdqAvWWSpEZQEA,300
-deepset_mcp/main.py,sha256=
+deepset_mcp/main.py,sha256=tmQnjUkgjgFB-vlSo_ZFG9Aj-ZIESjpOFIQH3lNu_lI,6568
 deepset_mcp/store.py,sha256=rhjAZgisgdmc7cr61qdTmp0ZEXCYBKZL5faPmQgDFT4,116
-deepset_mcp/tool_factory.py,sha256=
+deepset_mcp/tool_factory.py,sha256=ybwYu95_MAVsJU3uADSQAykb7YQjAZsAkTtWnZUdpRw,19561
 deepset_mcp/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 deepset_mcp/agents/debugging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 deepset_mcp/agents/debugging/debugging_agent.py,sha256=MRr88Af8AMrof1WLlxanppusaahp41WFCHkfaxrX_iI,1646
@@ -38,7 +39,7 @@ deepset_mcp/api/pipeline/models.py,sha256=JO5pvglyVNsXIIY1IFUnei2grarNoiPSEnAI3c
 deepset_mcp/api/pipeline/protocols.py,sha256=6n9AiQkMAzHyr0Fs0JrhtYXcb5Adz1fI5DTOLCtym58,2469
 deepset_mcp/api/pipeline/resource.py,sha256=Mov91pEDxeXoBtzQAsfvn8YVevnwjC2aJhey86we9bM,13989
 deepset_mcp/api/pipeline_template/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-deepset_mcp/api/pipeline_template/models.py,sha256=
+deepset_mcp/api/pipeline_template/models.py,sha256=TFuKQYv05BZs3eYOff4KOzqY4U2BOcE6P_p_hi4XCcg,2345
 deepset_mcp/api/pipeline_template/protocols.py,sha256=nyOAxgd1ZMrKfCYsOgjlo16xLvbUUWadWFhSuMa0S7k,644
 deepset_mcp/api/pipeline_template/resource.py,sha256=h7Umn70zJ3Klt-GPqPkrBwXZ-R8hgHsQKkhZ4DAsm7w,3113
 deepset_mcp/api/secrets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -95,20 +96,20 @@ deepset_mcp/prompts/deepset_copilot_prompt.md,sha256=QctQQ4yQ9zl-uWv48dfr1DGhEKP
 deepset_mcp/prompts/deepset_debugging_agent.md,sha256=m5Y-n9cXQGm9BZ3wZ3N_hQmMjrXVfc1cqV8i8Kle5uU,9488
 deepset_mcp/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 deepset_mcp/tools/custom_components.py,sha256=fl0HYEoX9a_yaDOJ8zrAeetnm43P9zkprlYaa_j16FI,1844
-deepset_mcp/tools/doc_search.py,sha256=
+deepset_mcp/tools/doc_search.py,sha256=WnfnY9-y_MYsUCMYSyi3suy0WvjLzP4RWEMgkvfcRnA,2736
 deepset_mcp/tools/haystack_service.py,sha256=bR8lL4cWMjJjM6gxpwdUt5xCJ01YMUrRKiG7s2lO7Ts,15050
 deepset_mcp/tools/haystack_service_models.py,sha256=eizThOPKfiUo9ayRAk-sbuSCoY_6gL0vEnv2MwVEGZY,2336
 deepset_mcp/tools/indexes.py,sha256=PSC0SLzo97x86LRwQuivz22_fPGUrBA8t6a8rE1b8V4,5306
 deepset_mcp/tools/model_protocol.py,sha256=NiEo8YG2RCFJdU8GApsmGCq3ZzrbAylP_BD6C_-UaNI,418
 deepset_mcp/tools/pipeline.py,sha256=mcbNoQw2l6QMkmfRvMF4PUoQq4KXxIEwHjiGpj46aGw,14732
-deepset_mcp/tools/pipeline_template.py,sha256=
-deepset_mcp/tools/secrets.py,sha256=
+deepset_mcp/tools/pipeline_template.py,sha256=gvrrUN-_hQ_DRbbwvN4Wtna5QWo8qKadPwqdRQXmBlU,5147
+deepset_mcp/tools/secrets.py,sha256=2YdfGKAg9IJwW390WfnQutq8oyRQ9XlWnIJlc53Eovk,3800
 deepset_mcp/tools/workspace.py,sha256=JAyUKI_BMGn9FnmoAqmnnRKsTXnSnPuZmuAB3JgzuMY,2729
 deepset_mcp/tools/tokonomics/__init__.py,sha256=QFDYoHPEpfaJ9fr5kfNJ1QXykDFzfy91i-wuqJLqrN0,1958
 deepset_mcp/tools/tokonomics/decorators.py,sha256=msJNho6p6ZnZ5CNjPLbt0RM0CLz8gUzzQCWYT30M10Q,14833
 deepset_mcp/tools/tokonomics/explorer.py,sha256=sSmvXesfl5GdJGiWgCj5ktTOub097pBhhF26BqqT_ig,12880
 deepset_mcp/tools/tokonomics/object_store.py,sha256=OtmVal_6hJPb5R-3mAGLmgAblgM05jdw3f1R7PvvdP0,6415
-deepset_mcp-0.0.2rc2.dist-info/METADATA,sha256=
-deepset_mcp-0.0.2rc2.dist-info/WHEEL,sha256=
-deepset_mcp-0.0.2rc2.dist-info/entry_points.txt,sha256=
-deepset_mcp-0.0.2rc2.dist-info/RECORD,,
+deepset_mcp-0.0.3.dist-info/METADATA,sha256=v6eb9GT2SuVU7-KBkuRP_Dd8U_UPcrazQXoVKdBLIjQ,9687
+deepset_mcp-0.0.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+deepset_mcp-0.0.3.dist-info/entry_points.txt,sha256=gm-y9dhJVhzslA3nh8W3tofranX76WAw_ATxWbzAhoQ,101
+deepset_mcp-0.0.3.dist-info/RECORD,,

{deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/WHEEL
File without changes

{deepset_mcp-0.0.2rc2.dist-info → deepset_mcp-0.0.3.dist-info}/entry_points.txt
File without changes