deepset-mcp 0.0.5rc1__py3-none-any.whl → 0.0.7__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- deepset_mcp/__init__.py +3 -4
- deepset_mcp/api/__init__.py +3 -0
- deepset_mcp/api/client.py +126 -107
- deepset_mcp/api/custom_components/__init__.py +3 -0
- deepset_mcp/api/custom_components/models.py +7 -8
- deepset_mcp/api/custom_components/protocols.py +4 -3
- deepset_mcp/api/custom_components/resource.py +39 -13
- deepset_mcp/api/haystack_service/__init__.py +3 -0
- deepset_mcp/api/haystack_service/protocols.py +21 -0
- deepset_mcp/api/haystack_service/resource.py +46 -0
- deepset_mcp/api/indexes/__init__.py +3 -0
- deepset_mcp/api/indexes/models.py +23 -11
- deepset_mcp/api/indexes/protocols.py +13 -4
- deepset_mcp/api/indexes/resource.py +86 -22
- deepset_mcp/api/integrations/__init__.py +4 -0
- deepset_mcp/api/integrations/models.py +4 -13
- deepset_mcp/api/integrations/protocols.py +3 -3
- deepset_mcp/api/integrations/resource.py +5 -5
- deepset_mcp/api/pipeline/__init__.py +1 -15
- deepset_mcp/api/pipeline/models.py +66 -28
- deepset_mcp/api/pipeline/protocols.py +6 -10
- deepset_mcp/api/pipeline/resource.py +101 -58
- deepset_mcp/api/pipeline_template/__init__.py +3 -0
- deepset_mcp/api/pipeline_template/models.py +12 -23
- deepset_mcp/api/pipeline_template/protocols.py +11 -5
- deepset_mcp/api/pipeline_template/resource.py +51 -39
- deepset_mcp/api/protocols.py +13 -11
- deepset_mcp/api/secrets/__init__.py +3 -0
- deepset_mcp/api/secrets/models.py +2 -8
- deepset_mcp/api/secrets/protocols.py +4 -3
- deepset_mcp/api/secrets/resource.py +32 -7
- deepset_mcp/api/shared_models.py +111 -1
- deepset_mcp/api/transport.py +30 -58
- deepset_mcp/api/user/__init__.py +3 -0
- deepset_mcp/api/workspace/__init__.py +1 -3
- deepset_mcp/api/workspace/models.py +4 -8
- deepset_mcp/api/workspace/protocols.py +3 -3
- deepset_mcp/api/workspace/resource.py +5 -9
- deepset_mcp/main.py +5 -20
- deepset_mcp/mcp/__init__.py +10 -0
- deepset_mcp/{server.py → mcp/server.py} +8 -18
- deepset_mcp/{store.py → mcp/store.py} +3 -3
- deepset_mcp/{tool_factory.py → mcp/tool_factory.py} +21 -38
- deepset_mcp/mcp/tool_models.py +57 -0
- deepset_mcp/{tool_registry.py → mcp/tool_registry.py} +16 -6
- deepset_mcp/{tools/tokonomics → tokonomics}/__init__.py +3 -1
- deepset_mcp/{tools/tokonomics → tokonomics}/decorators.py +2 -2
- deepset_mcp/{tools/tokonomics → tokonomics}/explorer.py +1 -1
- deepset_mcp/tools/__init__.py +58 -0
- deepset_mcp/tools/custom_components.py +7 -4
- deepset_mcp/tools/haystack_service.py +64 -22
- deepset_mcp/tools/haystack_service_models.py +40 -0
- deepset_mcp/tools/indexes.py +131 -32
- deepset_mcp/tools/object_store.py +1 -1
- deepset_mcp/tools/pipeline.py +40 -10
- deepset_mcp/tools/pipeline_template.py +35 -18
- deepset_mcp/tools/secrets.py +29 -13
- deepset_mcp/tools/workspace.py +2 -2
- deepset_mcp-0.0.7.dist-info/METADATA +100 -0
- deepset_mcp-0.0.7.dist-info/RECORD +74 -0
- deepset_mcp/api/README.md +0 -536
- deepset_mcp/api/pipeline/log_level.py +0 -13
- deepset_mcp/tool_models.py +0 -42
- deepset_mcp-0.0.5rc1.dist-info/METADATA +0 -807
- deepset_mcp-0.0.5rc1.dist-info/RECORD +0 -75
- /deepset_mcp/{tools/tokonomics → tokonomics}/object_store.py +0 -0
- {deepset_mcp-0.0.5rc1.dist-info → deepset_mcp-0.0.7.dist-info}/WHEEL +0 -0
- {deepset_mcp-0.0.5rc1.dist-info → deepset_mcp-0.0.7.dist-info}/entry_points.txt +0 -0
- {deepset_mcp-0.0.5rc1.dist-info → deepset_mcp-0.0.7.dist-info}/licenses/LICENSE +0 -0
deepset_mcp/api/indexes/resource.py

@@ -2,11 +2,15 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from typing import Any
+from urllib.parse import quote
+
 from deepset_mcp.api.exceptions import UnexpectedAPIError
-from deepset_mcp.api.indexes.models import Index
+from deepset_mcp.api.indexes.models import Index
 from deepset_mcp.api.indexes.protocols import IndexResourceProtocol
 from deepset_mcp.api.pipeline.models import PipelineValidationResult, ValidationError
 from deepset_mcp.api.protocols import AsyncClientProtocol
+from deepset_mcp.api.shared_models import PaginatedResponse
 from deepset_mcp.api.transport import raise_for_status
 
 
@@ -22,24 +26,45 @@ class IndexResource(IndexResourceProtocol):
         self._client = client
         self._workspace = workspace
 
-    async def list(self, limit: int = 10,
-        """
+    async def list(self, limit: int = 10, after: str | None = None) -> PaginatedResponse[Index]:
+        """Lists indexes and returns the first page of results.
 
-
-        :param page_number: Page number for pagination.
+        The returned object can be iterated over to fetch subsequent pages.
 
-        :
+        :param limit: The maximum number of indexes to return per page.
+        :param after: The cursor to fetch the next page of results.
+        :returns: A `PaginatedResponse` object containing the first page of indexes.
         """
-
-
-
-
-
-
+        # 1. Prepare arguments for the initial API call
+        # TODO: Pagination in the deepset API is currently implemented in an unintuitive way.
+        # TODO: The cursor is always time based (created_at) and after signifies indexes older than the current cursor
+        # TODO: while 'before' signals indexes younger than the current cursor.
+        # TODO: This is applied irrespective of any sort (e.g. name) that would conflict with this approach.
+        # TODO: Change this to 'after' once the behaviour is fixed on the deepset API
+        request_params = {"limit": limit, "before": after}
+        request_params = {k: v for k, v in request_params.items() if v is not None}
+
+        # 2. Make the first API call using a private, stateless method
+        page = await self._list_api_call(**request_params)
+
+        # 3. Inject the logic needed for subsequent fetches into the response object
+        page._inject_paginator(
+            fetch_func=self._list_api_call,
+            # Base args for the *next* fetch don't include initial cursors
+            base_args={"limit": limit},
+        )
+        return page
 
-
+    async def _list_api_call(self, **kwargs: Any) -> PaginatedResponse[Index]:
+        """A private, stateless method that performs the raw API call."""
+        resp = await self._client.request(
+            endpoint=f"v1/workspaces/{quote(self._workspace, safe='')}/indexes", method="GET", params=kwargs
+        )
+        raise_for_status(resp)
+        if resp.json is None:
+            raise UnexpectedAPIError(status_code=resp.status_code, message="Empty response", detail=None)
 
-        return
+        return PaginatedResponse[Index].create_with_cursor_field(resp.json, "pipeline_index_id")
 
     async def get(self, index_name: str) -> Index:
         """Get a specific index.
@@ -48,28 +73,32 @@ class IndexResource(IndexResourceProtocol):
 
         :returns: Index details.
         """
-        response = await self._client.request(
+        response = await self._client.request(
+            f"/v1/workspaces/{quote(self._workspace, safe='')}/indexes/{quote(index_name, safe='')}"
+        )
 
         raise_for_status(response)
 
         return Index.model_validate(response.json)
 
-    async def create(self,
+    async def create(self, index_name: str, yaml_config: str, description: str | None = None) -> Index:
         """Create a new index with the given name and configuration.
 
-        :param
+        :param index_name: Name of the index
        :param yaml_config: YAML configuration for the index
         :param description: Optional description for the index
         :returns: Created index details
         """
         data = {
-            "name":
+            "name": index_name,
             "config_yaml": yaml_config,
         }
         if description is not None:
             data["description"] = description
 
-        response = await self._client.request(
+        response = await self._client.request(
+            f"v1/workspaces/{quote(self._workspace, safe='')}/indexes", method="POST", data=data
+        )
 
         raise_for_status(response)
 
@@ -95,7 +124,9 @@ class IndexResource(IndexResourceProtocol):
             raise ValueError("At least one of updated_index_name or yaml_config must be provided")
 
         response = await self._client.request(
-            f"/v1/workspaces/{self._workspace}/indexes/{index_name
+            f"/v1/workspaces/{quote(self._workspace, safe='')}/indexes/{quote(index_name, safe='')}",
+            method="PATCH",
+            data=data,
         )
 
         raise_for_status(response)
@@ -107,7 +138,9 @@ class IndexResource(IndexResourceProtocol):
 
         :param index_name: Name of the index to delete.
         """
-        response = await self._client.request(
+        response = await self._client.request(
+            f"/v1/workspaces/{quote(self._workspace, safe='')}/indexes/{quote(index_name, safe='')}", method="DELETE"
+        )
 
         raise_for_status(response)
 
@@ -119,7 +152,7 @@ class IndexResource(IndexResourceProtocol):
         :raises UnexpectedAPIError: If the API returns an unexpected status code.
         """
         resp = await self._client.request(
-            endpoint=f"v1/workspaces/{self._workspace}/indexes/{index_name}/deploy",
+            endpoint=f"v1/workspaces/{quote(self._workspace, safe='')}/indexes/{quote(index_name, safe='')}/deploy",
             method="POST",
         )
 
@@ -140,3 +173,34 @@ class IndexResource(IndexResourceProtocol):
             return PipelineValidationResult(valid=False, errors=errors)
 
         raise UnexpectedAPIError(status_code=resp.status_code, message=resp.text, detail=resp.json)
+
+    async def validate(self, yaml_config: str) -> PipelineValidationResult:
+        """Validate an index's YAML configuration against the API.
+
+        :param yaml_config: The YAML configuration string to validate.
+        :returns: PipelineValidationResult containing validation status and any errors.
+        :raises ValueError: If the YAML is not valid (422 error) or contains syntax errors.
+        """
+        data = {"indexing_yaml": yaml_config}
+
+        resp = await self._client.request(
+            endpoint=f"v1/workspaces/{quote(self._workspace, safe='')}/pipeline_validations",
+            method="POST",
+            data=data,
+        )
+
+        # If successful (status 200), the YAML is valid
+        if resp.success:
+            return PipelineValidationResult(valid=True)
+
+        if resp.status_code == 400 and resp.json is not None and isinstance(resp.json, dict) and "details" in resp.json:
+            errors = [ValidationError(code=error["code"], message=error["message"]) for error in resp.json["details"]]
+
+            return PipelineValidationResult(valid=False, errors=errors)
+
+        if resp.status_code == 422:
+            errors = [ValidationError(code="YAML_ERROR", message="Syntax error in YAML")]
+
+            return PipelineValidationResult(valid=False, errors=errors)
+
+        raise UnexpectedAPIError(status_code=resp.status_code, message=resp.text, detail=resp.json)
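The index listing above moves from page-number pagination to a cursor-based `PaginatedResponse[Index]`, with the raw request isolated in `_list_api_call` and a paginator injected for follow-up fetches. A minimal consumption sketch, assuming the `IndexResource` constructor takes `client` and `workspace` (as its `__init__` assignments suggest) and that `PaginatedResponse` supports async iteration across pages (the iteration API itself is not part of this diff):

# Usage sketch only. Assumptions: an AsyncClientProtocol-compatible `client`
# is created elsewhere; PaginatedResponse[Index] is async-iterable and uses
# the injected paginator to fetch subsequent pages transparently.
from deepset_mcp.api.indexes.resource import IndexResource
from deepset_mcp.api.protocols import AsyncClientProtocol


async def print_index_names(client: AsyncClientProtocol, workspace: str) -> None:
    resource = IndexResource(client=client, workspace=workspace)

    # First page of up to 10 indexes; `after` would carry a cursor
    # (pipeline_index_id) taken from a previously fetched page.
    page = await resource.list(limit=10)

    # Assumed iteration behaviour: the injected paginator keeps fetching
    # older pages until the API is exhausted.
    async for index in page:
        print(index.name)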
deepset_mcp/api/integrations/models.py

@@ -35,19 +35,10 @@ class Integration(BaseModel):
     """Model representing an integration."""
 
     invalid: bool
+    "Whether the integration configuration is invalid or misconfigured"
     model_registry_token_id: UUID
+    "Unique identifier for the model registry token"
     provider: IntegrationProvider
+    "The integration provider type (e.g., OpenAI, Azure, etc.)"
     provider_domain: str
-
-
-class IntegrationList(BaseModel):
-    """Model representing a list of integrations."""
-
-    integrations: list[Integration]
-
-    def __len__(self) -> int:
-        """Return the length of the list.
-
-        :returns: Number of integrations.
-        """
-        return len(self.integrations)
+    "Domain or endpoint URL for the integration provider"
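The string literals added beneath each field are attribute docstrings: a bare string placed directly under a class attribute documents that attribute without changing runtime behaviour. A minimal sketch of the same pattern on an illustrative model (not part of the package):

# Illustrative only; ExampleIntegration is not part of deepset-mcp.
from uuid import UUID

from pydantic import BaseModel


class ExampleIntegration(BaseModel):
    """Mirror of the documented-field style used throughout this release."""

    invalid: bool
    "Whether the integration configuration is invalid or misconfigured"
    model_registry_token_id: UUID
    "Unique identifier for the model registry token"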
deepset_mcp/api/integrations/protocols.py

@@ -6,7 +6,7 @@
 
 from typing import TYPE_CHECKING, Protocol
 
-from deepset_mcp.api.integrations.models import Integration,
+from deepset_mcp.api.integrations.models import Integration, IntegrationProvider
 
 if TYPE_CHECKING:
     pass
@@ -15,10 +15,10 @@ if TYPE_CHECKING:
 class IntegrationResourceProtocol(Protocol):
     """Protocol for integration resource operations."""
 
-    async def list(self) ->
+    async def list(self) -> list[Integration]:
         """Retrieve all integrations.
 
-        :returns:
+        :returns: list containing all available integrations.
         """
         ...
 
deepset_mcp/api/integrations/resource.py

@@ -7,7 +7,7 @@
 import logging
 from typing import TYPE_CHECKING
 
-from deepset_mcp.api.integrations.models import Integration,
+from deepset_mcp.api.integrations.models import Integration, IntegrationProvider
 from deepset_mcp.api.integrations.protocols import IntegrationResourceProtocol
 from deepset_mcp.api.transport import raise_for_status
 
@@ -27,10 +27,10 @@ class IntegrationResource(IntegrationResourceProtocol):
         """
         self._client = client
 
-    async def list(self) ->
+    async def list(self) -> list[Integration]:
         """Retrieve all integrations.
 
-        :returns:
+        :returns: list containing all available integrations.
         """
         resp = await self._client.request(
             endpoint="v1/model_registry_tokens",
@@ -41,9 +41,9 @@ class IntegrationResource(IntegrationResourceProtocol):
 
         if resp.json is not None:
             integrations = [Integration.model_validate(item) for item in resp.json]
-            return
+            return integrations
         else:
-            return
+            return []
 
     async def get(self, provider: IntegrationProvider) -> Integration:
         """Retrieve a specific integration by provider.
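With `IntegrationList` removed, `IntegrationResource.list()` now returns a plain `list[Integration]` and falls back to an empty list when the response body is missing. A caller sketch, assuming a pre-built client that satisfies `AsyncClientProtocol`:

# Sketch only; client construction is out of scope for this diff.
from deepset_mcp.api.integrations.resource import IntegrationResource
from deepset_mcp.api.protocols import AsyncClientProtocol


async def count_integrations(client: AsyncClientProtocol) -> int:
    resource = IntegrationResource(client=client)
    integrations = await resource.list()  # list[Integration], possibly empty

    # A built-in list supports len() directly, so the old
    # IntegrationList.__len__ wrapper is no longer needed.
    return len(integrations)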
deepset_mcp/api/pipeline/__init__.py

@@ -2,20 +2,6 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from .models import (
-    DeepsetPipeline,
-    PipelineLog,
-    PipelineLogList,
-    PipelineValidationResult,
-    ValidationError,
-)
 from .resource import PipelineResource
 
-__all__ = [
-    "DeepsetPipeline",
-    "PipelineValidationResult",
-    "ValidationError",
-    "PipelineResource",
-    "PipelineLog",
-    "PipelineLogList",
-]
+__all__ = ["PipelineResource"]
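Because the subpackage no longer re-exports the model classes, code that previously imported them from `deepset_mcp.api.pipeline` would need to import from the models module directly, as the updated protocols module already does. A sketch of the adjusted imports:

# Old style (0.0.5rc1) relied on subpackage re-exports:
# from deepset_mcp.api.pipeline import DeepsetPipeline, PipelineValidationResult

# New style (0.0.7): only the resource is re-exported; models come from models.py.
from deepset_mcp.api.pipeline import PipelineResource
from deepset_mcp.api.pipeline.models import (
    DeepsetPipeline,
    PipelineLog,
    PipelineValidationResult,
    ValidationError,
)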
deepset_mcp/api/pipeline/models.py

@@ -25,17 +25,26 @@ class DeepsetPipeline(BaseModel):
     """Model representing a pipeline on the deepset platform."""
 
     id: str = Field(alias="pipeline_id")
+    "Unique identifier for the pipeline"
     name: str
+    "Human-readable name of the pipeline"
     status: str
+    "Current operational status of the pipeline"
     service_level: PipelineServiceLevel
+    "Service level indicating the deployment stage"
 
     created_at: datetime
+    "Timestamp when the pipeline was created"
     last_updated_at: datetime | None = Field(None, alias="last_edited_at")  # Map API's last_edited_at
+    "Timestamp when the pipeline was last modified"
 
     created_by: DeepsetUser
+    "User who created the pipeline"
     last_updated_by: DeepsetUser | None = Field(None, alias="last_edited_by")  # Map API's last_edited_by
+    "User who last modified the pipeline"
 
     yaml_config: str | None = None
+    "YAML configuration defining the pipeline structure"
 
     class Config:
         """Configuration for serialization and deserialization."""
@@ -67,14 +76,18 @@ class ValidationError(BaseModel):
     """Model representing a validation error from the pipeline validation API."""
 
     code: str
+    "Error code identifying the type of validation error"
     message: str
+    "Human-readable description of the validation error"
 
 
 class PipelineValidationResult(BaseModel):
     """Result of validating a pipeline configuration."""
 
     valid: bool
+    "Whether the pipeline configuration is valid"
     errors: list[ValidationError] = []
+    "List of validation errors if the pipeline is invalid"
 
     def __rich_repr__(self) -> Result:
         """Used to display the model in an LLM friendly way."""
@@ -86,36 +99,41 @@ class TraceFrame(BaseModel):
     """Model representing a single frame in a stack trace."""
 
     filename: str
+    "Name of the file where the trace frame occurred"
     line_number: int
+    "Line number in the file where the trace frame occurred"
     name: str
+    "Function or method name where the trace frame occurred"
 
 
 class ExceptionInfo(BaseModel):
     """Model representing exception information."""
 
     type: str
+    "Exception class name"
     value: str
+    "Exception message or string representation"
     trace: list[TraceFrame]
+    "Stack trace frames leading to the exception"
 
 
 class PipelineLog(BaseModel):
     """Model representing a single log entry from a pipeline."""
 
     log_id: str
+    "Unique identifier for the log entry"
     message: str
+    "Log message content"
     logged_at: datetime
+    "Timestamp when the log entry was created"
     level: str
+    "Log level (e.g., INFO, WARNING, ERROR)"
     origin: str
+    "Source component or service that generated the log"
     exceptions: list[ExceptionInfo] | None = None
+    "Exception information if the log contains error details"
     extra_fields: dict[str, Any] = Field(default_factory=dict)
-
-
-class PipelineLogList(BaseModel):
-    """Model representing a paginated list of pipeline logs."""
-
-    data: list[PipelineLog]
-    has_more: bool
-    total: int
+    "Additional metadata fields associated with the log entry"
 
 
 # Search-related models
@@ -125,48 +143,76 @@ class OffsetRange(BaseModel):
     """Model representing an offset range."""
 
     start: int
+    "Starting position of the offset range"
     end: int
+    "Ending position of the offset range"
 
 
 class DeepsetAnswer(BaseModel):
     """Model representing a search answer."""
 
     answer: str  # Required field
+    "The generated answer text"
     context: str | None = None
+    "Context text used to generate the answer"
     document_id: str | None = None
+    "Identifier of the source document"
     document_ids: list[str] | None = None
+    "List of source document identifiers"
     file: dict[str, Any] | None = None
+    "File metadata associated with the answer"
     files: list[dict[str, Any]] | None = None
+    "List of file metadata associated with the answer"
     meta: dict[str, Any] | None = None
+    "Additional metadata about the answer"
     offsets_in_context: list[OffsetRange] | None = None
+    "Character offset ranges within the context text"
     offsets_in_document: list[OffsetRange] | None = None
+    "Character offset ranges within the source document"
     prompt: str | None = None
+    "Prompt used to generate the answer"
     result_id: UUID | None = None
+    "Unique identifier for this result"
     score: float | None = None
+    "Confidence or relevance score for the answer"
     type: str | None = None
+    "Type classification of the answer"
 
 
 class DeepsetDocument(BaseModel):
     """Model representing a search document."""
 
     content: str  # Required field
+    "Text content of the document"
     meta: dict[str, Any]  # Required field - can hold any value
+    "Metadata dictionary containing document properties"
     embedding: list[float] | None = None
+    "Vector embedding representation of the document"
     file: dict[str, Any] | None = None
+    "File metadata if the document originated from a file"
     id: str | None = None
+    "Unique identifier for the document"
     result_id: UUID | None = None
+    "Unique identifier for this search result"
     score: float | None = None
+    "Relevance or similarity score for the document"
 
 
 class DeepsetSearchResponse(BaseModel):
     """Model representing a single search result."""
 
     debug: dict[str, Any] | None = Field(default=None, alias="_debug")
+    "Debug information for the search operation"
     answers: list[DeepsetAnswer] = Field(default_factory=list)
+    "List of generated answers from the search"
     documents: list[DeepsetDocument] = Field(default_factory=list)
+    "List of retrieved documents from the search"
     prompts: dict[str, str] | None = None
+    "Prompts used during the search operation"
     query: str | None = None
+    "Original search query text"
     query_id: UUID | None = None
+    "Unique identifier for the search query"
 
     @model_validator(mode="before")
     @classmethod
@@ -203,37 +249,29 @@ class StreamDelta(BaseModel):
     """Model representing a streaming delta."""
 
     text: str
+    "Incremental text content for streaming responses"
     meta: dict[str, Any] | None = None
+    "Metadata associated with the streaming delta"
 
 
 class DeepsetStreamEvent(BaseModel):
     """Model representing a stream event."""
 
     query_id: str | UUID | None = None
+    "Unique identifier for the associated query"
     type: str  # "delta", "result", or "error"
+    "Event type: 'delta', 'result', or 'error'"
     delta: StreamDelta | None = None
+    "Streaming text delta if type is 'delta'"
     result: DeepsetSearchResponse | None = None
+    "Complete search result if type is 'result'"
     error: str | None = None
+    "Error message if type is 'error'"
 
 
-class
-    """
-
-    data: list[DeepsetPipeline]
-    has_more: bool
-    total: int
-
+class LogLevel(StrEnum):
+    """Log level filter options for pipeline logs."""
 
-
-
-
-    validation_result: PipelineValidationResult
-    yaml_config: str
-
-
-class PipelineOperationWithErrors(BaseModel):
-    """Model for pipeline operations that complete with validation errors."""
-
-    message: str
-    validation_result: PipelineValidationResult
-    pipeline: DeepsetPipeline
+    INFO = "info"
+    WARNING = "warning"
+    ERROR = "error"
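`LogLevel` is now defined in `deepset_mcp.api.pipeline.models` as a `StrEnum` (the separate `log_level.py` module was deleted), so its members behave as lowercase strings. A small sketch of what that implies for callers:

from deepset_mcp.api.pipeline.models import LogLevel

# StrEnum members are also str instances, so they compare equal to the
# lowercase literals the API expects and serialize cleanly in query params.
assert LogLevel.ERROR == "error"
assert isinstance(LogLevel.WARNING, str)

# Parsing a user-supplied level string back into the enum.
level = LogLevel("info")
print(f"Filtering logs at level: {level}")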
deepset_mcp/api/pipeline/protocols.py

@@ -5,16 +5,15 @@
 from collections.abc import AsyncIterator
 from typing import Any, Protocol
 
-from deepset_mcp.api.pipeline.log_level import LogLevel
 from deepset_mcp.api.pipeline.models import (
     DeepsetPipeline,
     DeepsetSearchResponse,
     DeepsetStreamEvent,
-
-
+    LogLevel,
+    PipelineLog,
     PipelineValidationResult,
 )
-from deepset_mcp.api.shared_models import NoContentResponse
+from deepset_mcp.api.shared_models import NoContentResponse, PaginatedResponse
 
 
 class PipelineResourceProtocol(Protocol):
@@ -28,11 +27,7 @@ class PipelineResourceProtocol(Protocol):
         """Fetch a single pipeline by its name."""
         ...
 
-    async def list(
-        self,
-        page_number: int = 1,
-        limit: int = 10,
-    ) -> PipelineList:
+    async def list(self, limit: int = 10, after: str | None = None) -> PaginatedResponse[DeepsetPipeline]:
         """List pipelines in the configured workspace with optional pagination."""
         ...
 
@@ -54,7 +49,8 @@ class PipelineResourceProtocol(Protocol):
         pipeline_name: str,
         limit: int = 30,
         level: LogLevel | None = None,
-
+        after: str | None = None,
+    ) -> PaginatedResponse[PipelineLog]:
         """Fetch logs for a specific pipeline."""
         ...
 
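The protocol now uses cursor-based pagination for both pipeline listing and log retrieval. A sketch of calling a conforming resource, assuming the current page's entries are exposed on the `PaginatedResponse` (the exact accessor is not shown in this diff, so `items` below is an assumption):

# Sketch against PipelineResourceProtocol; `resource` may be any object
# satisfying the protocol. The `.items` attribute is assumed for illustration.
from deepset_mcp.api.pipeline.models import LogLevel
from deepset_mcp.api.pipeline.protocols import PipelineResourceProtocol


async def show_recent_errors(resource: PipelineResourceProtocol, pipeline_name: str) -> None:
    # Cursor-based listing: `after` replaces the removed page_number parameter.
    first_page = await resource.list(limit=10, after=None)
    print(f"First page of pipelines: {first_page!r}")

    error_logs = await resource.get_logs(
        pipeline_name=pipeline_name,
        limit=30,
        level=LogLevel.ERROR,
        after=None,
    )
    for log in getattr(error_logs, "items", []):  # accessor name is assumed
        print(log.logged_at, log.level, log.message)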