deepset-mcp 0.0.6__py3-none-any.whl → 0.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. deepset_mcp/__init__.py +3 -4
  2. deepset_mcp/api/__init__.py +3 -0
  3. deepset_mcp/api/client.py +126 -107
  4. deepset_mcp/api/custom_components/__init__.py +3 -0
  5. deepset_mcp/api/custom_components/models.py +7 -8
  6. deepset_mcp/api/custom_components/protocols.py +4 -3
  7. deepset_mcp/api/custom_components/resource.py +39 -13
  8. deepset_mcp/api/haystack_service/__init__.py +3 -0
  9. deepset_mcp/api/haystack_service/protocols.py +21 -0
  10. deepset_mcp/api/haystack_service/resource.py +46 -0
  11. deepset_mcp/api/indexes/__init__.py +3 -0
  12. deepset_mcp/api/indexes/models.py +23 -11
  13. deepset_mcp/api/indexes/protocols.py +13 -4
  14. deepset_mcp/api/indexes/resource.py +86 -22
  15. deepset_mcp/api/integrations/__init__.py +4 -0
  16. deepset_mcp/api/integrations/models.py +4 -13
  17. deepset_mcp/api/integrations/protocols.py +3 -3
  18. deepset_mcp/api/integrations/resource.py +5 -5
  19. deepset_mcp/api/pipeline/__init__.py +1 -15
  20. deepset_mcp/api/pipeline/models.py +66 -28
  21. deepset_mcp/api/pipeline/protocols.py +6 -10
  22. deepset_mcp/api/pipeline/resource.py +101 -58
  23. deepset_mcp/api/pipeline_template/__init__.py +3 -0
  24. deepset_mcp/api/pipeline_template/models.py +12 -23
  25. deepset_mcp/api/pipeline_template/protocols.py +11 -5
  26. deepset_mcp/api/pipeline_template/resource.py +51 -39
  27. deepset_mcp/api/protocols.py +13 -11
  28. deepset_mcp/api/secrets/__init__.py +3 -0
  29. deepset_mcp/api/secrets/models.py +2 -8
  30. deepset_mcp/api/secrets/protocols.py +4 -3
  31. deepset_mcp/api/secrets/resource.py +32 -7
  32. deepset_mcp/api/shared_models.py +111 -1
  33. deepset_mcp/api/transport.py +30 -58
  34. deepset_mcp/api/user/__init__.py +3 -0
  35. deepset_mcp/api/workspace/__init__.py +1 -3
  36. deepset_mcp/api/workspace/models.py +4 -8
  37. deepset_mcp/api/workspace/protocols.py +3 -3
  38. deepset_mcp/api/workspace/resource.py +5 -9
  39. deepset_mcp/config.py +1 -1
  40. deepset_mcp/main.py +5 -20
  41. deepset_mcp/mcp/__init__.py +10 -0
  42. deepset_mcp/{server.py → mcp/server.py} +8 -18
  43. deepset_mcp/{store.py → mcp/store.py} +3 -3
  44. deepset_mcp/{tool_factory.py → mcp/tool_factory.py} +20 -37
  45. deepset_mcp/mcp/tool_models.py +57 -0
  46. deepset_mcp/{tool_registry.py → mcp/tool_registry.py} +16 -6
  47. deepset_mcp/{tools/tokonomics → tokonomics}/__init__.py +3 -1
  48. deepset_mcp/{tools/tokonomics → tokonomics}/decorators.py +2 -2
  49. deepset_mcp/{tools/tokonomics → tokonomics}/explorer.py +1 -1
  50. deepset_mcp/tools/__init__.py +58 -0
  51. deepset_mcp/tools/custom_components.py +7 -4
  52. deepset_mcp/tools/haystack_service.py +64 -22
  53. deepset_mcp/tools/haystack_service_models.py +40 -0
  54. deepset_mcp/tools/indexes.py +131 -32
  55. deepset_mcp/tools/object_store.py +1 -1
  56. deepset_mcp/tools/pipeline.py +40 -10
  57. deepset_mcp/tools/pipeline_template.py +35 -18
  58. deepset_mcp/tools/secrets.py +29 -13
  59. deepset_mcp/tools/workspace.py +2 -2
  60. deepset_mcp-0.0.8.dist-info/METADATA +100 -0
  61. deepset_mcp-0.0.8.dist-info/RECORD +74 -0
  62. deepset_mcp/api/README.md +0 -536
  63. deepset_mcp/api/pipeline/log_level.py +0 -13
  64. deepset_mcp/tool_models.py +0 -42
  65. deepset_mcp-0.0.6.dist-info/METADATA +0 -807
  66. deepset_mcp-0.0.6.dist-info/RECORD +0 -75
  67. /deepset_mcp/{tools/tokonomics → tokonomics}/object_store.py +0 -0
  68. {deepset_mcp-0.0.6.dist-info → deepset_mcp-0.0.8.dist-info}/WHEEL +0 -0
  69. {deepset_mcp-0.0.6.dist-info → deepset_mcp-0.0.8.dist-info}/entry_points.txt +0 -0
  70. {deepset_mcp-0.0.6.dist-info → deepset_mcp-0.0.8.dist-info}/licenses/LICENSE +0 -0
deepset_mcp/api/shared_models.py CHANGED
@@ -2,20 +2,130 @@
  #
  # SPDX-License-Identifier: Apache-2.0

- from pydantic import BaseModel, Field
+ from collections.abc import AsyncIterator, Awaitable, Callable, Coroutine
+ from typing import Any, Generic, TypeVar
+
+ from pydantic import BaseModel, Field, PrivateAttr, model_validator


  class NoContentResponse(BaseModel):
      """Response model for an empty response."""

      success: bool = True
+     "Indicates whether the operation was successful"
      message: str = "No content"
+     "Human-readable message describing the response"


  class DeepsetUser(BaseModel):
      """Model representing a user on the deepset platform."""

      id: str = Field(alias="user_id")
+     "Unique identifier for the user"
      given_name: str | None = None
+     "User's given (first) name"
      family_name: str | None = None
+     "User's family (last) name"
      email: str | None = None
+     "User's email address"
+
+
+ T = TypeVar("T")
+
+
+ class PaginatedResponse(BaseModel, Generic[T]):
+     """
+     A response model for a single page of cursor-paginated results.
+
+     This model also acts as an async iterator to fetch subsequent pages.
+     """
+
+     # --- Public Data Fields ---
+     data: list[T]
+     "List of items for the current page"
+     has_more: bool
+     "Whether there are more items available beyond this page"
+     total: int | None = None
+     "Total number of items across all pages, if known"
+     next_cursor: str | None = None
+     "Cursor for fetching the next page of results"
+
+     # --- Internal Paginator State (Defaults to None) ---
+     _fetch_func: Callable[..., Coroutine[Any, Any, "PaginatedResponse[T]"]] | None = PrivateAttr(default=None)
+     _base_args: dict[str, Any] | None = PrivateAttr(default=None)
+     _cursor_param: str = PrivateAttr(default="before")
+
+     @model_validator(mode="before")
+     @classmethod
+     def populate_cursors_from_data(cls, data: dict[str, Any]) -> dict[str, Any]:
+         """Populate next_cursor from the last element of data."""
+         if isinstance(data, dict) and isinstance(data.get("data"), list):
+             data_list = data["data"]
+
+             if data_list and data.get("has_more"):
+                 last_item = data_list[-1]
+                 if isinstance(last_item, dict):
+                     # Use the cursor field if specified, raise if not provided
+                     cursor_field = data.get("_cursor_field")
+                     if not cursor_field:
+                         raise ValueError("Cursor field must be specified when creating PaginatedResponse")
+                     cursor = last_item.get(cursor_field)
+                     data["next_cursor"] = cursor
+
+         return data
+
+     @classmethod
+     def create_with_cursor_field(cls, data: dict[str, Any], cursor_field: str) -> "PaginatedResponse[T]":
+         """Factory method that allows specifying the cursor field."""
+         # Inject the cursor field into the data before validation
+         data_copy = data.copy()
+         data_copy["_cursor_field"] = cursor_field
+         return cls.model_validate(data_copy)
+
+     def _inject_paginator(
+         self,
+         fetch_func: Callable[..., Awaitable["PaginatedResponse[T]"]],
+         base_args: dict[str, Any],
+         cursor_param: str = "before",
+     ) -> None:
+         """Injects the necessary components to make this object iterable."""
+         # Convert Awaitable to Coroutine for typing compatibility
+         if callable(fetch_func):
+             # This is a runtime check - mypy doesn't understand the callable compatibility
+             self._fetch_func = fetch_func  # type: ignore
+         self._base_args = {k: v for k, v in base_args.items() if v is not None}
+         self._cursor_param = cursor_param
+
+     async def _get_next_page(self) -> "PaginatedResponse[T] | None":
+         """Fetches the next page of results using the stored fetch function."""
+         if self._fetch_func is None or self._base_args is None:
+             raise TypeError(
+                 "Paginator has not been initialized. Please use the resource's list() method to create this object."
+             )
+
+         if not self.has_more or not self.next_cursor:
+             return None
+
+         args = self._base_args.copy()
+         # TODO: Pagination in the deepset API is currently implemented in an unintuitive way.
+         # TODO: The cursor is always time based (created_at) and after signifies pipelines older than the current cursor
+         # TODO: while 'before' signals pipelines younger than the current cursor.
+         # TODO: This is applied irrespective of any sort (e.g. name) that would conflict with this approach.
+         # TODO: Change this to 'after' once the behaviour is fixed on the deepset API
+         args[self._cursor_param] = self.next_cursor
+
+         next_page = await self._fetch_func(**args)
+         next_page._inject_paginator(self._fetch_func, self._base_args, self._cursor_param)
+         return next_page
+
+     async def items(self) -> AsyncIterator[T]:
+         """Asynchronously iterates over each item across all pages, starting from this page."""
+         current_page: PaginatedResponse[T] | None = self
+         while current_page:
+             for item in current_page.data:
+                 yield item
+             current_page = await current_page._get_next_page()
+
+     def __aiter__(self) -> AsyncIterator[T]:
+         """Make the object itself iterable for the most pythonic experience."""
+         return self.items()
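The PaginatedResponse added above is meant to be consumed with async for: resource list() methods inject a fetch function so that iterating a single page transparently pulls the following pages. A minimal consumption sketch, assuming some_resource.list() returns a paginator-injected PaginatedResponse and that its items expose a name attribute (both are illustrative, not taken from this diff):

    from deepset_mcp.api.shared_models import PaginatedResponse

    async def collect_names(some_resource) -> list[str]:
        # Hypothetical resource call; any list() method that returns a
        # paginator-injected PaginatedResponse behaves the same way.
        page: PaginatedResponse = await some_resource.list(limit=10)

        print(page.has_more, page.total, page.next_cursor)  # metadata for this page only

        names: list[str] = []
        # __aiter__ delegates to items(), which keeps calling the injected
        # fetch function until has_more is False or no cursor remains.
        async for item in page:
            names.append(item.name)  # "name" is an illustrative item attribute
        return names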
deepset_mcp/api/transport.py CHANGED
@@ -187,17 +187,11 @@ class AsyncTransport:
          api_key: str,
          config: dict[str, Any] | None = None,
      ):
-         """
-         Initialize an instance of AsyncTransport.
-
-         Parameters
-         ----------
-         base_url : str
-             Base URL for the API
-         api_key : str
-             Bearer token for authentication
-         config : dict, optional
-             Configuration for httpx.AsyncClient, e.g., {'timeout': 10.0}
+         """Initialize an instance of AsyncTransport.
+
+         :param base_url: Base URL for the API
+         :param api_key: Bearer token for authentication
+         :param config: Configuration for httpx.AsyncClient, e.g., {'timeout': 10.0}
          """
          # We deepcopy the config so that we don't mutate it when used for subsequent initializations
          config = deepcopy(config) or {}
@@ -244,27 +238,15 @@ class AsyncTransport:
          timeout: float | None | Literal["config"] = "config",
          **kwargs: Any,
      ) -> TransportResponse[Any]:
-         """
-         Send a regular HTTP request and return the response.
-
-         Parameters
-         ----------
-         method : str
-             HTTP method
-         url : str
-             URL endpoint
-         response_type : type[T], optional
-             Expected response type for type checking
-         timeout : float | None | Literal["config"], optional
-             Request timeout in seconds. If "config", uses transport config timeout.
-             If None, disables timeout. If float, uses specific timeout.
-         **kwargs : Any
-             Additional arguments to pass to httpx
-
-         Returns
-         -------
-         TransportResponse[T]
-             The response with parsed JSON if available
+         """Send a regular HTTP request and return the response.
+
+         :param method: HTTP method
+         :param url: URL endpoint
+         :param response_type: Expected response type for type checking
+         :param timeout: Request timeout in seconds. If "config", uses transport config timeout.
+             If None, disables timeout. If float, uses specific timeout.
+         :param kwargs: Additional arguments to pass to httpx
+         :returns: The response with parsed JSON if available
          """
          if timeout != "config":
              kwargs["timeout"] = timeout
@@ -300,32 +282,22 @@ class AsyncTransport:
          return TransportResponse(text=response.text, status_code=response.status_code, json=untyped_response)

      def stream(self, method: str, url: str, **kwargs: Any) -> AbstractAsyncContextManager[StreamingResponse]:
-         """
-         Open a streaming HTTP connection.
-
-         Parameters
-         ----------
-         method : str
-             HTTP method
-         url : str
-             URL endpoint
-         **kwargs : Any
-             Additional arguments to pass to httpx.stream()
-
-         Yields
-         ------
-         StreamingResponse
-             Response object with streaming capabilities
-
-         Examples
-         --------
-         async with transport.stream("POST", "/api/stream", json=data) as response:
-             if response.success:
-                 async for line in response.iter_lines():
-                     process_line(line)
-             else:
-                 error = await response.read_body()
-                 handle_error(error)
+         """Open a streaming HTTP connection.
+
+         :param method: HTTP method
+         :param url: URL endpoint
+         :param kwargs: Additional arguments to pass to httpx.stream()
+         :yields: Response object with streaming capabilities
+
+         .. code-block:: python
+
+             async with transport.stream("POST", "/api/stream", json=data) as response:
+                 if response.success:
+                     async for line in response.iter_lines():
+                         process_line(line)
+                 else:
+                     error = await response.read_body()
+                     handle_error(error)
          """

      @asynccontextmanager
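For orientation, a small usage sketch that matches the rewritten transport docstrings, assuming AsyncTransport is importable from deepset_mcp.api.transport; the base URL, endpoint path, and payload are placeholders:

    from deepset_mcp.api.transport import AsyncTransport

    transport = AsyncTransport(
        base_url="https://api.example.com",  # placeholder
        api_key="your-api-key",
        config={"timeout": 10.0},  # passed through to httpx.AsyncClient
    )

    async def run_streaming_query() -> None:
        # Mirrors the example embedded in the stream() docstring above.
        async with transport.stream("POST", "v1/some-endpoint", json={"query": "hi"}) as response:
            if response.success:
                async for line in response.iter_lines():
                    print(line)
            else:
                print(await response.read_body())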
deepset_mcp/api/user/__init__.py CHANGED
@@ -2,3 +2,6 @@
  #
  # SPDX-License-Identifier: Apache-2.0

+ from .resource import UserResource
+
+ __all__ = ["UserResource"]
deepset_mcp/api/workspace/__init__.py CHANGED
@@ -4,8 +4,6 @@

  """Workspace API module."""

- from .models import Workspace, WorkspaceList
- from .protocols import WorkspaceResourceProtocol
  from .resource import WorkspaceResource

- __all__ = ["Workspace", "WorkspaceList", "WorkspaceResourceProtocol", "WorkspaceResource"]
+ __all__ = ["WorkspaceResource"]
deepset_mcp/api/workspace/models.py CHANGED
@@ -14,14 +14,10 @@ class Workspace(BaseModel):
      """Model representing a workspace on the deepset platform."""

      name: str
+     "Human-readable name of the workspace"
      workspace_id: UUID
+     "Unique identifier for the workspace"
      languages: dict[str, Any]
+     "Supported languages and their configuration settings"
      default_idle_timeout_in_seconds: int
-
-
- class WorkspaceList(BaseModel):
-     """Model representing a list of workspaces."""
-
-     data: list[Workspace]
-     has_more: bool = False
-     total: int
+     "Default timeout in seconds before workspace becomes idle"
deepset_mcp/api/workspace/protocols.py CHANGED
@@ -7,16 +7,16 @@
  from typing import Protocol

  from deepset_mcp.api.shared_models import NoContentResponse
- from deepset_mcp.api.workspace.models import Workspace, WorkspaceList
+ from deepset_mcp.api.workspace.models import Workspace


  class WorkspaceResourceProtocol(Protocol):
      """Protocol defining the interface for workspace resources."""

-     async def list(self) -> WorkspaceList:
+     async def list(self) -> list[Workspace]:
          """List all workspaces.

-         :returns: A WorkspaceList containing all workspaces.
+         :returns: A list containing all workspaces.
          """
          ...

deepset_mcp/api/workspace/resource.py CHANGED
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING

  from deepset_mcp.api.shared_models import NoContentResponse
  from deepset_mcp.api.transport import raise_for_status
- from deepset_mcp.api.workspace.models import Workspace, WorkspaceList
+ from deepset_mcp.api.workspace.models import Workspace
  from deepset_mcp.api.workspace.protocols import WorkspaceResourceProtocol

  logger = logging.getLogger(__name__)
@@ -28,10 +28,10 @@ class WorkspaceResource(WorkspaceResourceProtocol):
          """
          self._client = client

-     async def list(self) -> WorkspaceList:
+     async def list(self) -> list[Workspace]:
          """List all workspaces.

-         :returns: A WorkspaceList containing all workspaces.
+         :returns: A list containing all workspaces.
          """
          resp = await self._client.request(
              endpoint="v1/workspaces",
@@ -42,13 +42,9 @@ class WorkspaceResource(WorkspaceResourceProtocol):

          if resp.json is not None and isinstance(resp.json, list):
              workspaces = [Workspace.model_validate(item) for item in resp.json]
-             return WorkspaceList(
-                 data=workspaces,
-                 has_more=False,
-                 total=len(workspaces),
-             )
+             return workspaces
          else:
-             return WorkspaceList(data=[], has_more=False, total=0)
+             return []

      async def get(self, workspace_name: str) -> Workspace:
          """Get a specific workspace by name.
deepset_mcp/config.py CHANGED
@@ -35,7 +35,7 @@ TOKEN_DOMAIN_MAPPING = {
      "together.ai": ["TOGETHERAI_API_KEY"],
  }

- DEEPSET_DOCS_DEFAULT_SHARE_URL = "https://cloud.deepset.ai/shared_prototypes?share_token=prototype_eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3ODM0MjE0OTguNTk5LCJhdWQiOiJleHRlcm5hbCB1c2VyIiwiaXNzIjoiZEMiLCJ3b3Jrc3BhY2VfaWQiOiI4YzI0ZjExMi1iMjljLTQ5MWMtOTkzOS1hZTkxMDRhNTQyMWMiLCJ3b3Jrc3BhY2VfbmFtZSI6ImRjLWRvY3MtY29udGVudCIsIm9yZ2FuaXphdGlvbl9pZCI6ImNhOWYxNGQ0LWMyYzktNDYwZC04ZDI2LWY4Y2IwYWNhMDI0ZiIsInNoYXJlX2lkIjoiY2Y3MTA3ODAtOThmNi00MzlmLThiNzYtMmMwNDkyODNiMDZhIiwibG9naW5fcmVxdWlyZWQiOmZhbHNlfQ.5j6DCNRQ1_KB8lhIJqHyw2hBIleEW1_Y_UBuH6MTYY0"
+ DEEPSET_DOCS_DEFAULT_SHARE_URL = "https://cloud.deepset.ai/shared_prototypes?share_token=prototype_eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3ODg1MjA1NjYuMzY2LCJhdWQiOiJleHRlcm5hbCB1c2VyIiwiaXNzIjoiZEMiLCJ3b3Jrc3BhY2VfaWQiOiI4YzI0ZjExMi1iMjljLTQ5MWMtOTkzOS1hZTkxMDRhNTQyMWMiLCJ3b3Jrc3BhY2VfbmFtZSI6ImRjLWRvY3MtY29udGVudCIsIm9yZ2FuaXphdGlvbl9pZCI6ImNhOWYxNGQ0LWMyYzktNDYwZC04ZDI2LWY4Y2IwYWNhMDI0ZiIsInNoYXJlX2lkIjoiNTMwYWE2ODQtMTM0NC00M2MyLWJlZjQtMjA5MWNmMWFjYWJmIiwibG9naW5fcmVxdWlyZWQiOmZhbHNlfQ.SyKIoKI-Gl6ajRDgSECOuLkgEIjCIobDvveT0rVJUnM"
  DOCS_SEARCH_TOOL_NAME = "search_docs"

  DEFAULT_CLIENT_HEADER = {"headers": {"User-Agent": f"deepset-mcp/{__version__}"}}
deepset_mcp/main.py CHANGED
@@ -11,9 +11,8 @@ import typer
  from mcp.server.fastmcp import FastMCP

  from deepset_mcp.config import DEEPSET_DOCS_DEFAULT_SHARE_URL, DOCS_SEARCH_TOOL_NAME
- from deepset_mcp.server import configure_mcp_server
- from deepset_mcp.tool_models import WorkspaceMode
- from deepset_mcp.tool_registry import TOOL_REGISTRY
+ from deepset_mcp.mcp.server import configure_mcp_server
+ from deepset_mcp.mcp.tool_registry import TOOL_REGISTRY


  class TransportEnum(StrEnum):
@@ -60,13 +59,6 @@ def main(
              help="Deepset docs search share URL. Can also be set via DEEPSET_DOCS_SHARE_URL environment variable.",
          ),
      ] = None,
-     workspace_mode: Annotated[
-         WorkspaceMode,
-         typer.Option(
-             "--workspace-mode",
-             help="Whether workspace should be set statically or dynamically provided during a tool call.",
-         ),
-     ] = WorkspaceMode.STATIC,
      tools: Annotated[
          list[str] | None,
          typer.Option(
@@ -126,11 +118,10 @@ def main(
      The Deepset MCP server provides tools to interact with the deepset AI platform,
      allowing you to create, debug, and learn about pipelines on the platform.

-     :param workspace: Deepset workspace name
+     :param workspace: Deepset workspace name. Pass if you only want to run the tools on a specific workspace.
      :param api_key: Deepset API key for authentication
      :param api_url: Deepset API base URL
      :param docs_share_url: Deepset docs search share URL
-     :param workspace_mode: Whether workspace should be set statically or dynamically
      :param tools: List of tools to register
      :param list_tools: List all available tools and exit
      :param api_key_from_auth_header: Get API key from authorization header
@@ -171,13 +162,8 @@ def main(
          )
          raise typer.Exit(1)

-     if workspace_mode == WorkspaceMode.STATIC and not workspace:
-         typer.echo(
-             "Error: Workspace is required when using static workspace mode. "
-             "Set --workspace or DEEPSET_WORKSPACE environment variable.",
-             err=True,
-         )
-         raise typer.Exit(1)
+     if not workspace:
+         logging.info("No workspace specified. Workspace needs to be provided during tool calling.")

      if DOCS_SEARCH_TOOL_NAME in tool_names and docs_share_url is None:
          typer.echo(
@@ -190,7 +176,6 @@ def main(
      mcp = FastMCP("deepset AI platform MCP server")
      configure_mcp_server(
          mcp_server_instance=mcp,
-         workspace_mode=workspace_mode,
          deepset_api_key=api_key,
          deepset_api_url=api_url,
          deepset_workspace=workspace,
deepset_mcp/mcp/__init__.py ADDED
@@ -0,0 +1,10 @@
+ # SPDX-FileCopyrightText: 2025-present deepset GmbH <info@deepset.ai>
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ from .server import configure_mcp_server
+ from .store import initialize_or_get_initialized_store
+ from .tool_factory import build_tool
+ from .tool_models import ToolConfig
+
+ __all__ = ["configure_mcp_server", "build_tool", "ToolConfig", "initialize_or_get_initialized_store"]
deepset_mcp/{server.py → mcp/server.py} RENAMED
@@ -10,15 +10,14 @@ from mcp.server.fastmcp import FastMCP
  from deepset_mcp.api.client import AsyncDeepsetClient
  from deepset_mcp.config import DEEPSET_DOCS_DEFAULT_SHARE_URL
- from deepset_mcp.store import initialize_store
- from deepset_mcp.tool_factory import register_tools
- from deepset_mcp.tool_models import DeepsetDocsConfig, WorkspaceMode
- from deepset_mcp.tool_registry import TOOL_REGISTRY
+ from deepset_mcp.mcp.store import initialize_or_get_initialized_store
+ from deepset_mcp.mcp.tool_factory import register_tools
+ from deepset_mcp.mcp.tool_models import DeepsetDocsConfig
+ from deepset_mcp.mcp.tool_registry import TOOL_REGISTRY


  def configure_mcp_server(
      mcp_server_instance: FastMCP,
-     workspace_mode: WorkspaceMode | str,
      tools_to_register: set[str] | None = None,
      deepset_api_key: str | None = None,
      deepset_api_url: str | None = None,
@@ -32,12 +31,11 @@ def configure_mcp_server(
      """Configure the MCP server with the specified tools and settings.

      :param mcp_server_instance: The FastMCP server instance to configure
-     :param workspace_mode: The workspace mode ("static" or "dynamic")
      :param tools_to_register: Set of tool names to register with the server.
          Will register all tools if set to None.
      :param deepset_api_key: Optional Deepset API key for authentication
      :param deepset_api_url: Optional Deepset API base URL
-     :param deepset_workspace: Optional workspace name for static mode
+     :param deepset_workspace: Pass a deepset workspace name if you only want to run the tools on a specific workspace.
      :param deepset_docs_shareable_prototype_url: Shareable prototype URL that allows access to a docs search pipeline.
          Will fall back to the default shareable prototype URL if set to None.
      :param get_api_key_from_authorization_header: Whether to extract API key from authorization header
@@ -52,15 +50,6 @@ def configure_mcp_server(
      if deepset_docs_shareable_prototype_url is None:
          deepset_docs_shareable_prototype_url = DEEPSET_DOCS_DEFAULT_SHARE_URL

-     if isinstance(workspace_mode, str):
-         workspace_mode = WorkspaceMode(workspace_mode)
-
-     if workspace_mode == WorkspaceMode.STATIC and deepset_workspace is None:
-         raise ValueError(
-             "Static workspace mode requires a workspace name. "
-             "Please provide 'deepset_workspace' when using WorkspaceMode.STATIC."
-         )
-
      if deepset_api_key is None and not get_api_key_from_authorization_header:
          raise ValueError(
              "API key is required for authentication. "
@@ -73,11 +62,12 @@ def configure_mcp_server(
      docs_config = DeepsetDocsConfig(api_key=api_key_docs, workspace_name=workspace_name, pipeline_name=pipeline_name)

      # Initialize the store before registering tools
-     store = initialize_store(backend=object_store_backend, redis_url=object_store_redis_url, ttl=object_store_ttl)
+     store = initialize_or_get_initialized_store(
+         backend=object_store_backend, redis_url=object_store_redis_url, ttl=object_store_ttl
+     )

      register_tools(
          mcp_server_instance=mcp_server_instance,
-         workspace_mode=workspace_mode,
          workspace=deepset_workspace,
          tool_names=tools_to_register,
          docs_config=docs_config,
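With workspace_mode gone, embedding the server in code only requires deciding whether to pin a workspace. A minimal sketch (API key and workspace values are placeholders):

    from mcp.server.fastmcp import FastMCP

    from deepset_mcp.mcp import configure_mcp_server

    mcp = FastMCP("deepset AI platform MCP server")

    # With deepset_workspace set, tools are bound to that workspace and the
    # parameter is removed from their signatures; without it, every tool call
    # has to supply a workspace itself.
    configure_mcp_server(
        mcp_server_instance=mcp,
        deepset_api_key="your-api-key",
        deepset_workspace="my-workspace",
    )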
deepset_mcp/{store.py → mcp/store.py} RENAMED
@@ -7,7 +7,7 @@
  import functools
  import logging

- from deepset_mcp.tools.tokonomics.object_store import InMemoryBackend, ObjectStore, ObjectStoreBackend, RedisBackend
+ from deepset_mcp.tokonomics import InMemoryBackend, ObjectStore, ObjectStoreBackend, RedisBackend

  logger = logging.getLogger(__name__)

@@ -30,12 +30,12 @@ def create_redis_backend(url: str) -> ObjectStoreBackend:


  @functools.lru_cache(maxsize=1)
- def initialize_store(
+ def initialize_or_get_initialized_store(
      backend: str = "memory",
      redis_url: str | None = None,
      ttl: int = 600,
  ) -> ObjectStore:
-     """Initialize the object store.
+     """Initializes the object store or gets an existing object store instance if it was initialized before.

      :param backend: Backend type ('memory' or 'redis')
      :param redis_url: Redis connection URL (required if backend='redis')
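Because the renamed helper keeps its functools.lru_cache(maxsize=1) decorator, calling it again with the same arguments returns the previously initialized store rather than a new one, which is exactly what the new name spells out. A small sketch:

    from deepset_mcp.mcp.store import initialize_or_get_initialized_store

    store_a = initialize_or_get_initialized_store(backend="memory", ttl=600)
    store_b = initialize_or_get_initialized_store(backend="memory", ttl=600)

    # lru_cache memoises the call, so both names point to the same ObjectStore.
    assert store_a is store_b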
deepset_mcp/{tool_factory.py → mcp/tool_factory.py} RENAMED
@@ -14,9 +14,9 @@ from mcp.server.fastmcp import Context, FastMCP

  from deepset_mcp.api.client import AsyncDeepsetClient
  from deepset_mcp.config import DEFAULT_CLIENT_HEADER, DOCS_SEARCH_TOOL_NAME
- from deepset_mcp.tool_models import DeepsetDocsConfig, MemoryType, ToolConfig, WorkspaceMode
- from deepset_mcp.tool_registry import TOOL_REGISTRY
- from deepset_mcp.tools.tokonomics import (
+ from deepset_mcp.mcp.tool_models import DeepsetDocsConfig, MemoryType, ToolConfig
+ from deepset_mcp.mcp.tool_registry import TOOL_REGISTRY
+ from deepset_mcp.tokonomics import (
      ObjectStore,
      RichExplorer,
      explorable,
@@ -78,41 +78,35 @@ def remove_params_from_docstring(docstring: str | None, params_to_remove: set[st


  def apply_workspace(
-     base_func: Callable[..., Any], config: ToolConfig, workspace_mode: WorkspaceMode, workspace: str | None = None
+     base_func: Callable[..., Any], config: ToolConfig, workspace: str | None = None
  ) -> Callable[..., Any]:
      """
-     Applies a deepset workspace to the function depending on the workspace mode and the ToolConfig.
+     Applies a deepset workspace to the function depending on the ToolConfig.

      Removes the workspace argument from the function's signature and docstring if applied.

      :param base_func: The function to apply workspace to.
      :param config: The ToolConfig for the function.
-     :param workspace_mode: The WorkspaceMode for the function.
-     :param workspace: The workspace to use for static mode.
+     :param workspace: The workspace to use.
      :returns: Function with workspace handling applied and updated signature/docstring.
      :raises ValueError: If workspace is required but not available.
      """
-     if not config.needs_workspace:
+     if not config.needs_workspace or not workspace:
          return base_func

-     if workspace_mode == WorkspaceMode.STATIC:
-
-         @functools.wraps(base_func)
-         async def workspace_wrapper(*args: Any, **kwargs: Any) -> Any:
-             return await base_func(*args, workspace=workspace, **kwargs)
+     @functools.wraps(base_func)
+     async def workspace_wrapper(*args: Any, **kwargs: Any) -> Any:
+         return await base_func(*args, workspace=workspace, **kwargs)

-         # Remove workspace from signature
-         original_sig = inspect.signature(base_func)
-         new_params = [p for name, p in original_sig.parameters.items() if name != "workspace"]
-         workspace_wrapper.__signature__ = original_sig.replace(parameters=new_params)  # type: ignore
+     # Remove workspace from signature
+     original_sig = inspect.signature(base_func)
+     new_params = [p for name, p in original_sig.parameters.items() if name != "workspace"]
+     workspace_wrapper.__signature__ = original_sig.replace(parameters=new_params)  # type: ignore

-         # Remove workspace from docstring
-         workspace_wrapper.__doc__ = remove_params_from_docstring(base_func.__doc__, {"workspace"})
+     # Remove workspace from docstring
+     workspace_wrapper.__doc__ = remove_params_from_docstring(base_func.__doc__, {"workspace"})

-         return workspace_wrapper
-     else:
-         # For dynamic mode, workspace is passed as parameter
-         return base_func
+     return workspace_wrapper


  def apply_memory(
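The slimmed-down apply_workspace now has only two outcomes: bind the given workspace and strip the parameter, or return the function untouched. A sketch with a toy tool function; ToolConfig(needs_workspace=True) is assumed to be a valid way to construct the config and is illustrative only:

    import inspect

    from deepset_mcp.mcp.tool_factory import apply_workspace
    from deepset_mcp.mcp.tool_models import ToolConfig

    async def list_indexes(workspace: str, limit: int = 10) -> str:
        """Toy tool used only for illustration."""
        return f"{limit} indexes in {workspace}"

    config = ToolConfig(needs_workspace=True)  # assumed constructor arguments

    bound = apply_workspace(base_func=list_indexes, config=config, workspace="my-workspace")
    print(inspect.signature(bound))  # "workspace" is gone; callers only pass "limit"

    unbound = apply_workspace(base_func=list_indexes, config=config, workspace=None)
    print(unbound is list_indexes)  # True: with no workspace the function is returned as-is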
@@ -139,7 +133,7 @@ def apply_memory(
          return explorable(object_store=store, explorer=explorer)(base_func)
      elif config.memory_type == MemoryType.REFERENCEABLE:
          return referenceable(object_store=store, explorer=explorer)(base_func)
-     elif config.memory_type == MemoryType.BOTH:
+     elif config.memory_type == MemoryType.EXPLORABLE_AND_REFERENCEABLE:
          return explorable_and_referenceable(object_store=store, explorer=explorer)(base_func)
      else:
          raise ValueError(f"Invalid memory type: {config.memory_type}")
@@ -226,7 +220,6 @@ def apply_client(
  def build_tool(
      base_func: Callable[..., Any],
      config: ToolConfig,
-     workspace_mode: WorkspaceMode,
      api_key: str | None = None,
      workspace: str | None = None,
      use_request_context: bool = True,
@@ -240,7 +233,6 @@

      :param base_func: The base tool function.
      :param config: Tool configuration specifying dependencies and custom arguments.
-     :param workspace_mode: How the workspace should be handled.
      :param api_key: The deepset API key to use.
      :param workspace: The workspace to use when using a static workspace.
      :param use_request_context: Whether to collect the API key from the request context.
@@ -257,7 +249,7 @@
      enhanced_func = apply_memory(enhanced_func, config, object_store)

      # Apply workspace handling
-     enhanced_func = apply_workspace(enhanced_func, config, workspace_mode, workspace)
+     enhanced_func = apply_workspace(base_func=enhanced_func, config=config, workspace=workspace)

      # Apply client injection (adds ctx parameter if needed)
      enhanced_func = apply_client(
@@ -282,7 +274,6 @@

  def register_tools(
      mcp_server_instance: FastMCP,
-     workspace_mode: WorkspaceMode,
      api_key: str | None = None,
      workspace: str | None = None,
      tool_names: set[str] | None = None,
@@ -295,9 +286,8 @@

      Args:
          mcp_server_instance: FastMCP server instance
-         workspace_mode: How workspace should be handled
          api_key: An api key for the deepset AI platform; only needs to be provided when not read from request context.
-         workspace: Workspace to use; only needs to be provided if using a static workspace.
+         workspace: Pass a deepset workspace name if you only want to run the tools on a specific workspace.
          tool_names: Set of tool names to register (if None, registers all tools)
          get_api_key_from_authorization_header: Whether to use request context to retrieve an API key for tool execution.
          docs_config: Configuration for the deepset documentation search tool.
@@ -310,12 +300,6 @@
              "Either pass 'api_key' or 'use_request_context'."
          )

-     if workspace_mode == WorkspaceMode.STATIC and workspace is None:
-         raise ValueError(
-             "'workspace_mode' set to 'static' but no workspace provided. "
-             "You need to set a deepset workspace name as 'workspace'."
-         )
-
      if docs_config is None and tool_names is None:
          raise ValueError(
              f"'docs_config' cannot be None when requesting to register all tools. "
@@ -360,7 +344,6 @@
          enhanced_tool = build_tool(
              base_func=base_func,
              config=config,
-             workspace_mode=workspace_mode,
              workspace=workspace,
              use_request_context=get_api_key_from_authorization_header,
              base_url=base_url,