deepset-mcp 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. deepset_mcp/__init__.py +0 -0
  2. deepset_mcp/agents/__init__.py +0 -0
  3. deepset_mcp/agents/debugging/__init__.py +0 -0
  4. deepset_mcp/agents/debugging/debugging_agent.py +37 -0
  5. deepset_mcp/agents/debugging/system_prompt.md +214 -0
  6. deepset_mcp/agents/generalist/__init__.py +0 -0
  7. deepset_mcp/agents/generalist/generalist_agent.py +38 -0
  8. deepset_mcp/agents/generalist/system_prompt.md +241 -0
  9. deepset_mcp/api/README.md +536 -0
  10. deepset_mcp/api/__init__.py +0 -0
  11. deepset_mcp/api/client.py +277 -0
  12. deepset_mcp/api/custom_components/__init__.py +0 -0
  13. deepset_mcp/api/custom_components/models.py +25 -0
  14. deepset_mcp/api/custom_components/protocols.py +17 -0
  15. deepset_mcp/api/custom_components/resource.py +56 -0
  16. deepset_mcp/api/exceptions.py +70 -0
  17. deepset_mcp/api/haystack_service/__init__.py +0 -0
  18. deepset_mcp/api/haystack_service/protocols.py +13 -0
  19. deepset_mcp/api/haystack_service/resource.py +55 -0
  20. deepset_mcp/api/indexes/__init__.py +0 -0
  21. deepset_mcp/api/indexes/models.py +63 -0
  22. deepset_mcp/api/indexes/protocols.py +53 -0
  23. deepset_mcp/api/indexes/resource.py +138 -0
  24. deepset_mcp/api/integrations/__init__.py +1 -0
  25. deepset_mcp/api/integrations/models.py +49 -0
  26. deepset_mcp/api/integrations/protocols.py +27 -0
  27. deepset_mcp/api/integrations/resource.py +57 -0
  28. deepset_mcp/api/pipeline/__init__.py +17 -0
  29. deepset_mcp/api/pipeline/log_level.py +9 -0
  30. deepset_mcp/api/pipeline/models.py +235 -0
  31. deepset_mcp/api/pipeline/protocols.py +83 -0
  32. deepset_mcp/api/pipeline/resource.py +378 -0
  33. deepset_mcp/api/pipeline_template/__init__.py +0 -0
  34. deepset_mcp/api/pipeline_template/models.py +56 -0
  35. deepset_mcp/api/pipeline_template/protocols.py +17 -0
  36. deepset_mcp/api/pipeline_template/resource.py +88 -0
  37. deepset_mcp/api/protocols.py +122 -0
  38. deepset_mcp/api/secrets/__init__.py +0 -0
  39. deepset_mcp/api/secrets/models.py +16 -0
  40. deepset_mcp/api/secrets/protocols.py +29 -0
  41. deepset_mcp/api/secrets/resource.py +112 -0
  42. deepset_mcp/api/shared_models.py +17 -0
  43. deepset_mcp/api/transport.py +336 -0
  44. deepset_mcp/api/user/__init__.py +0 -0
  45. deepset_mcp/api/user/protocols.py +11 -0
  46. deepset_mcp/api/user/resource.py +38 -0
  47. deepset_mcp/api/workspace/__init__.py +7 -0
  48. deepset_mcp/api/workspace/models.py +23 -0
  49. deepset_mcp/api/workspace/protocols.py +41 -0
  50. deepset_mcp/api/workspace/resource.py +94 -0
  51. deepset_mcp/benchmark/README.md +425 -0
  52. deepset_mcp/benchmark/__init__.py +1 -0
  53. deepset_mcp/benchmark/agent_configs/debugging_agent.yml +10 -0
  54. deepset_mcp/benchmark/agent_configs/generalist_agent.yml +6 -0
  55. deepset_mcp/benchmark/dp_validation_error_analysis/__init__.py +0 -0
  56. deepset_mcp/benchmark/dp_validation_error_analysis/eda.ipynb +757 -0
  57. deepset_mcp/benchmark/dp_validation_error_analysis/prepare_interaction_data.ipynb +167 -0
  58. deepset_mcp/benchmark/dp_validation_error_analysis/preprocessing_utils.py +213 -0
  59. deepset_mcp/benchmark/runner/__init__.py +0 -0
  60. deepset_mcp/benchmark/runner/agent_benchmark_runner.py +561 -0
  61. deepset_mcp/benchmark/runner/agent_loader.py +110 -0
  62. deepset_mcp/benchmark/runner/cli.py +39 -0
  63. deepset_mcp/benchmark/runner/cli_agent.py +373 -0
  64. deepset_mcp/benchmark/runner/cli_index.py +71 -0
  65. deepset_mcp/benchmark/runner/cli_pipeline.py +73 -0
  66. deepset_mcp/benchmark/runner/cli_tests.py +226 -0
  67. deepset_mcp/benchmark/runner/cli_utils.py +61 -0
  68. deepset_mcp/benchmark/runner/config.py +73 -0
  69. deepset_mcp/benchmark/runner/config_loader.py +64 -0
  70. deepset_mcp/benchmark/runner/interactive.py +140 -0
  71. deepset_mcp/benchmark/runner/models.py +203 -0
  72. deepset_mcp/benchmark/runner/repl.py +67 -0
  73. deepset_mcp/benchmark/runner/setup_actions.py +238 -0
  74. deepset_mcp/benchmark/runner/streaming.py +360 -0
  75. deepset_mcp/benchmark/runner/teardown_actions.py +196 -0
  76. deepset_mcp/benchmark/runner/tracing.py +21 -0
  77. deepset_mcp/benchmark/tasks/chat_rag_answers_wrong_format.yml +16 -0
  78. deepset_mcp/benchmark/tasks/documents_output_wrong.yml +13 -0
  79. deepset_mcp/benchmark/tasks/jinja_str_instead_of_complex_type.yml +11 -0
  80. deepset_mcp/benchmark/tasks/jinja_syntax_error.yml +11 -0
  81. deepset_mcp/benchmark/tasks/missing_output_mapping.yml +14 -0
  82. deepset_mcp/benchmark/tasks/no_query_input.yml +13 -0
  83. deepset_mcp/benchmark/tasks/pipelines/chat_agent_jinja_str.yml +141 -0
  84. deepset_mcp/benchmark/tasks/pipelines/chat_agent_jinja_syntax.yml +141 -0
  85. deepset_mcp/benchmark/tasks/pipelines/chat_rag_answers_wrong_format.yml +181 -0
  86. deepset_mcp/benchmark/tasks/pipelines/chat_rag_missing_output_mapping.yml +189 -0
  87. deepset_mcp/benchmark/tasks/pipelines/rag_documents_wrong_format.yml +193 -0
  88. deepset_mcp/benchmark/tasks/pipelines/rag_no_query_input.yml +191 -0
  89. deepset_mcp/benchmark/tasks/pipelines/standard_index.yml +167 -0
  90. deepset_mcp/initialize_embedding_model.py +12 -0
  91. deepset_mcp/main.py +133 -0
  92. deepset_mcp/prompts/deepset_copilot_prompt.md +271 -0
  93. deepset_mcp/prompts/deepset_debugging_agent.md +214 -0
  94. deepset_mcp/store.py +5 -0
  95. deepset_mcp/tool_factory.py +473 -0
  96. deepset_mcp/tools/__init__.py +0 -0
  97. deepset_mcp/tools/custom_components.py +52 -0
  98. deepset_mcp/tools/doc_search.py +83 -0
  99. deepset_mcp/tools/haystack_service.py +358 -0
  100. deepset_mcp/tools/haystack_service_models.py +97 -0
  101. deepset_mcp/tools/indexes.py +129 -0
  102. deepset_mcp/tools/model_protocol.py +16 -0
  103. deepset_mcp/tools/pipeline.py +335 -0
  104. deepset_mcp/tools/pipeline_template.py +116 -0
  105. deepset_mcp/tools/secrets.py +45 -0
  106. deepset_mcp/tools/tokonomics/__init__.py +73 -0
  107. deepset_mcp/tools/tokonomics/decorators.py +396 -0
  108. deepset_mcp/tools/tokonomics/explorer.py +347 -0
  109. deepset_mcp/tools/tokonomics/object_store.py +177 -0
  110. deepset_mcp/tools/workspace.py +61 -0
  111. deepset_mcp-0.0.2.dist-info/METADATA +288 -0
  112. deepset_mcp-0.0.2.dist-info/RECORD +114 -0
  113. deepset_mcp-0.0.2.dist-info/WHEEL +4 -0
  114. deepset_mcp-0.0.2.dist-info/entry_points.txt +3 -0
deepset_mcp/api/indexes/resource.py
@@ -0,0 +1,138 @@
+ from deepset_mcp.api.exceptions import UnexpectedAPIError
+ from deepset_mcp.api.indexes.models import Index, IndexList
+ from deepset_mcp.api.indexes.protocols import IndexResourceProtocol
+ from deepset_mcp.api.pipeline.models import PipelineValidationResult, ValidationError
+ from deepset_mcp.api.protocols import AsyncClientProtocol
+ from deepset_mcp.api.transport import raise_for_status
+
+
+ class IndexResource(IndexResourceProtocol):
+     """Resource for interacting with deepset indexes."""
+
+     def __init__(self, client: AsyncClientProtocol, workspace: str) -> None:
+         """Initialize the index resource.
+
+         :param client: The async REST client.
+         :param workspace: The workspace to use.
+         """
+         self._client = client
+         self._workspace = workspace
+
+     async def list(self, limit: int = 10, page_number: int = 1) -> IndexList:
+         """List all indexes.
+
+         :param limit: Maximum number of indexes to return.
+         :param page_number: Page number for pagination.
+
+         :returns: List of indexes.
+         """
+         params = {
+             "limit": limit,
+             "page_number": page_number,
+         }
+
+         response = await self._client.request(f"/v1/workspaces/{self._workspace}/indexes", params=params)
+
+         raise_for_status(response)
+
+         return IndexList.model_validate(response.json)
+
+     async def get(self, index_name: str) -> Index:
+         """Get a specific index.
+
+         :param index_name: Name of the index.
+
+         :returns: Index details.
+         """
+         response = await self._client.request(f"/v1/workspaces/{self._workspace}/indexes/{index_name}")
+
+         raise_for_status(response)
+
+         return Index.model_validate(response.json)
+
+     async def create(self, name: str, yaml_config: str, description: str | None = None) -> Index:
+         """Create a new index with the given name and configuration.
+
+         :param name: Name of the index.
+         :param yaml_config: YAML configuration for the index.
+         :param description: Optional description for the index.
+         :returns: Created index details.
+         """
+         data = {
+             "name": name,
+             "config_yaml": yaml_config,
+         }
+         if description is not None:
+             data["description"] = description
+
+         response = await self._client.request(f"v1/workspaces/{self._workspace}/indexes", method="POST", data=data)
+
+         raise_for_status(response)
+
+         return Index.model_validate(response.json)
+
+     async def update(
+         self, index_name: str, updated_index_name: str | None = None, yaml_config: str | None = None
+     ) -> Index:
+         """Update the name and/or configuration of an existing index.
+
+         :param index_name: Name of the index to update.
+         :param updated_index_name: Optional new name for the index.
+         :param yaml_config: Optional new YAML configuration.
+         :returns: Updated index details.
+         """
+         data = {}
+         if updated_index_name is not None:
+             data["name"] = updated_index_name
+         if yaml_config is not None:
+             data["config_yaml"] = yaml_config
+
+         if not data:
+             raise ValueError("At least one of updated_index_name or yaml_config must be provided")
+
+         response = await self._client.request(
+             f"/v1/workspaces/{self._workspace}/indexes/{index_name}", method="PATCH", data=data
+         )
+
+         raise_for_status(response)
+
+         return Index.model_validate(response.json)
+
+     async def delete(self, index_name: str) -> None:
+         """Delete an index.
+
+         :param index_name: Name of the index to delete.
+         """
+         response = await self._client.request(f"/v1/workspaces/{self._workspace}/indexes/{index_name}", method="DELETE")
+
+         raise_for_status(response)
+
+     async def deploy(self, index_name: str) -> PipelineValidationResult:
+         """Deploy an index.
+
+         :param index_name: Name of the index to deploy.
+         :returns: PipelineValidationResult containing deployment status and any errors.
+         :raises UnexpectedAPIError: If the API returns an unexpected status code.
+         """
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/indexes/{index_name}/deploy",
+             method="POST",
+         )
+
+         # A 200 response means the deployment succeeded.
+         if resp.success:
+             return PipelineValidationResult(valid=True)
+
+         # Handle validation errors (422)
+         if resp.status_code == 422 and resp.json is not None and isinstance(resp.json, dict) and "details" in resp.json:
+             errors = [ValidationError(code=error["code"], message=error["message"]) for error in resp.json["details"]]
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         # Handle other 4xx errors (400, 404, 424)
+         if 400 <= resp.status_code < 500:
+             # For non-validation errors, create a generic error message
+             error_message = resp.text if resp.text else f"HTTP {resp.status_code} error"
+             errors = [ValidationError(code="DEPLOYMENT_ERROR", message=error_message)]
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         raise UnexpectedAPIError(status_code=resp.status_code, message=resp.text, detail=resp.json)
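For orientation, here is a hypothetical usage sketch of the IndexResource added above. It is not part of the diff; AsyncDeepsetClient and its constructor arguments are assumptions based on the client module listed in this release (deepset_mcp/api/client.py).

import asyncio

from deepset_mcp.api.client import AsyncDeepsetClient  # assumed client class name
from deepset_mcp.api.indexes.resource import IndexResource


async def main() -> None:
    # The client is assumed to implement AsyncClientProtocol and async context management.
    async with AsyncDeepsetClient() as client:
        indexes = IndexResource(client=client, workspace="my-workspace")

        # List the first page of indexes, then deploy one by name.
        index_list = await indexes.list(limit=5, page_number=1)
        print(index_list)

        result = await indexes.deploy(index_name="my-index")
        if not result.valid:
            for error in result.errors:
                print(f"{error.code}: {error.message}")


asyncio.run(main())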
deepset_mcp/api/integrations/__init__.py
@@ -0,0 +1 @@
+ """Integration API resources and models."""
deepset_mcp/api/integrations/models.py
@@ -0,0 +1,49 @@
+ """Models for the integrations API."""
+
+ from enum import StrEnum
+ from uuid import UUID
+
+ from pydantic import BaseModel
+
+
+ class IntegrationProvider(StrEnum):
+     """Supported integration providers."""
+
+     AWS_BEDROCK = "aws-bedrock"
+     AZURE_DOCUMENT_INTELLIGENCE = "azure-document-intelligence"
+     AZURE_OPENAI = "azure-openai"
+     COHERE = "cohere"
+     DEEPL = "deepl"
+     GOOGLE = "google"
+     HUGGINGFACE = "huggingface"
+     NVIDIA = "nvidia"
+     OPENAI = "openai"
+     SEARCHAPI = "searchapi"
+     SNOWFLAKE = "snowflake"
+     UNSTRUCTURED = "unstructured"
+     VOYAGE_AI = "voyage-ai"
+     WANDB_AI = "wandb-ai"
+     MONGODB = "mongodb"
+     TOGETHER_AI = "together-ai"
+
+
+ class Integration(BaseModel):
+     """Model representing an integration."""
+
+     invalid: bool
+     model_registry_token_id: UUID
+     provider: IntegrationProvider
+     provider_domain: str
+
+
+ class IntegrationList(BaseModel):
+     """Model representing a list of integrations."""
+
+     integrations: list[Integration]
+
+     def __len__(self) -> int:
+         """Return the length of the list.
+
+         :returns: Number of integrations.
+         """
+         return len(self.integrations)
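A brief, self-contained illustration of these models (all field values below are invented for the example):

from uuid import uuid4

from deepset_mcp.api.integrations.models import Integration, IntegrationList, IntegrationProvider

# Illustrative values, not real API data.
integration = Integration(
    invalid=False,
    model_registry_token_id=uuid4(),
    provider=IntegrationProvider.OPENAI,
    provider_domain="api.openai.com",
)
listing = IntegrationList(integrations=[integration])
assert len(listing) == 1  # __len__ delegates to the wrapped list
assert listing.integrations[0].provider.value == "openai"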
deepset_mcp/api/integrations/protocols.py
@@ -0,0 +1,27 @@
+ """Protocol definitions for integrations resource."""
+
+ from typing import TYPE_CHECKING, Protocol
+
+ from deepset_mcp.api.integrations.models import Integration, IntegrationList, IntegrationProvider
+
+ if TYPE_CHECKING:
+     pass
+
+
+ class IntegrationResourceProtocol(Protocol):
+     """Protocol for integration resource operations."""
+
+     async def list(self) -> IntegrationList:
+         """Retrieve all integrations.
+
+         :returns: IntegrationList containing all available integrations.
+         """
+         ...
+
+     async def get(self, provider: IntegrationProvider) -> Integration:
+         """Retrieve a specific integration by provider.
+
+         :param provider: The integration provider to retrieve.
+         :returns: Integration instance for the specified provider.
+         """
+         ...
deepset_mcp/api/integrations/resource.py
@@ -0,0 +1,57 @@
+ """Resource implementation for integrations API."""
+
+ import logging
+ from typing import TYPE_CHECKING
+
+ from deepset_mcp.api.integrations.models import Integration, IntegrationList, IntegrationProvider
+ from deepset_mcp.api.integrations.protocols import IntegrationResourceProtocol
+ from deepset_mcp.api.transport import raise_for_status
+
+ logger = logging.getLogger(__name__)
+
+ if TYPE_CHECKING:
+     from deepset_mcp.api.protocols import AsyncClientProtocol
+
+
+ class IntegrationResource(IntegrationResourceProtocol):
+     """Manages interactions with the deepset integrations API."""
+
+     def __init__(self, client: "AsyncClientProtocol") -> None:
+         """Initialize an IntegrationResource instance.
+
+         :param client: The async client protocol instance.
+         """
+         self._client = client
+
+     async def list(self) -> IntegrationList:
+         """Retrieve all integrations.
+
+         :returns: IntegrationList containing all available integrations.
+         """
+         resp = await self._client.request(
+             endpoint="v1/model_registry_tokens",
+             method="GET",
+         )
+
+         raise_for_status(resp)
+
+         if resp.json is not None:
+             integrations = [Integration.model_validate(item) for item in resp.json]
+             return IntegrationList(integrations=integrations)
+         else:
+             return IntegrationList(integrations=[])
+
+     async def get(self, provider: IntegrationProvider) -> Integration:
+         """Retrieve a specific integration by provider.
+
+         :param provider: The integration provider to retrieve.
+         :returns: Integration instance for the specified provider.
+         """
+         resp = await self._client.request(
+             endpoint=f"v1/model_registry_tokens/{provider.value}",
+             method="GET",
+         )
+
+         raise_for_status(resp)
+
+         return Integration.model_validate(resp.json)
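Usage mirrors the other resources. A sketch, assuming client is any object satisfying AsyncClientProtocol:

from deepset_mcp.api.integrations.models import IntegrationProvider
from deepset_mcp.api.integrations.resource import IntegrationResource
from deepset_mcp.api.protocols import AsyncClientProtocol


async def check_openai(client: AsyncClientProtocol) -> None:
    resource = IntegrationResource(client=client)
    # get() requests v1/model_registry_tokens/openai, since IntegrationProvider.OPENAI.value is "openai".
    integration = await resource.get(IntegrationProvider.OPENAI)
    print(integration.provider_domain, integration.invalid)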
deepset_mcp/api/pipeline/__init__.py
@@ -0,0 +1,17 @@
+ from .models import (
+     DeepsetPipeline,
+     PipelineLog,
+     PipelineLogList,
+     PipelineValidationResult,
+     ValidationError,
+ )
+ from .resource import PipelineResource
+
+ __all__ = [
+     "DeepsetPipeline",
+     "PipelineValidationResult",
+     "ValidationError",
+     "PipelineResource",
+     "PipelineLog",
+     "PipelineLogList",
+ ]
deepset_mcp/api/pipeline/log_level.py
@@ -0,0 +1,9 @@
+ from enum import StrEnum
+
+
+ class LogLevel(StrEnum):
+     """Log level filter options for pipeline logs."""
+
+     INFO = "info"
+     WARNING = "warning"
+     ERROR = "error"
deepset_mcp/api/pipeline/models.py
@@ -0,0 +1,235 @@
+ from datetime import datetime
+ from enum import StrEnum
+ from typing import Any
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field, model_validator
+ from rich.repr import Result
+
+ from deepset_mcp.api.shared_models import DeepsetUser
+
+
+ class PipelineServiceLevel(StrEnum):
+     """Describes the service level of a pipeline."""
+
+     PRODUCTION = "PRODUCTION"
+     DEVELOPMENT = "DEVELOPMENT"
+     DRAFT = "DRAFT"
+
+
+ class DeepsetPipeline(BaseModel):
+     """Model representing a pipeline on the deepset platform."""
+
+     id: str = Field(alias="pipeline_id")
+     name: str
+     status: str
+     service_level: PipelineServiceLevel
+
+     created_at: datetime
+     last_updated_at: datetime | None = Field(None, alias="last_edited_at")  # Map API's last_edited_at
+
+     created_by: DeepsetUser
+     last_updated_by: DeepsetUser | None = Field(None, alias="last_edited_by")  # Map API's last_edited_by
+
+     yaml_config: str | None = None
+
+     class Config:
+         """Configuration for serialization and deserialization."""
+
+         populate_by_name = True  # Allow both alias and model field names
+         json_encoders = {
+             # When serializing back to JSON, convert datetimes to ISO format
+             datetime: lambda dt: dt.isoformat()
+         }
+
+     def __rich_repr__(self) -> Result:
+         """Used to display the model in an LLM friendly way."""
+         yield "name", self.name
+         yield "service_level", self.service_level.value
+         yield "status", self.status
+         yield "created_by", f"{self.created_by.given_name} {self.created_by.family_name} ({self.created_by.id})"
+         yield "created_at", self.created_at.strftime("%m/%d/%Y %I:%M:%S %p")
+         yield (
+             "last_updated_by",
+             f"{self.last_updated_by.given_name} {self.last_updated_by.family_name} ({self.last_updated_by.id})"
+             if self.last_updated_by
+             else None,
+         )
+         yield "last_updated_at", self.last_updated_at.strftime("%m/%d/%Y %I:%M:%S %p") if self.last_updated_at else None
+         yield "yaml_config", self.yaml_config if self.yaml_config is not None else "Get full pipeline to see config."
+
+
+ class ValidationError(BaseModel):
+     """Model representing a validation error from the pipeline validation API."""
+
+     code: str
+     message: str
+
+
+ class PipelineValidationResult(BaseModel):
+     """Result of validating a pipeline configuration."""
+
+     valid: bool
+     errors: list[ValidationError] = []
+
+     def __rich_repr__(self) -> Result:
+         """Used to display the model in an LLM friendly way."""
+         yield "valid", self.valid
+         yield "errors", [f"{e.message} ({e.code})" for e in self.errors]
+
+
+ class TraceFrame(BaseModel):
+     """Model representing a single frame in a stack trace."""
+
+     filename: str
+     line_number: int
+     name: str
+
+
+ class ExceptionInfo(BaseModel):
+     """Model representing exception information."""
+
+     type: str
+     value: str
+     trace: list[TraceFrame]
+
+
+ class PipelineLog(BaseModel):
+     """Model representing a single log entry from a pipeline."""
+
+     log_id: str
+     message: str
+     logged_at: datetime
+     level: str
+     origin: str
+     exceptions: list[ExceptionInfo] | None = None
+     extra_fields: dict[str, Any] = Field(default_factory=dict)
+
+
+ class PipelineLogList(BaseModel):
+     """Model representing a paginated list of pipeline logs."""
+
+     data: list[PipelineLog]
+     has_more: bool
+     total: int
+
+
+ # Search-related models
+
+
+ class OffsetRange(BaseModel):
+     """Model representing an offset range."""
+
+     start: int
+     end: int
+
+
+ class DeepsetAnswer(BaseModel):
+     """Model representing a search answer."""
+
+     answer: str  # Required field
+     context: str | None = None
+     document_id: str | None = None
+     document_ids: list[str] | None = None
+     file: dict[str, Any] | None = None
+     files: list[dict[str, Any]] | None = None
+     meta: dict[str, Any] | None = None
+     offsets_in_context: list[OffsetRange] | None = None
+     offsets_in_document: list[OffsetRange] | None = None
+     prompt: str | None = None
+     result_id: UUID | None = None
+     score: float | None = None
+     type: str | None = None
+
+
+ class DeepsetDocument(BaseModel):
+     """Model representing a search document."""
+
+     content: str  # Required field
+     meta: dict[str, Any]  # Required field - can hold any value
+     embedding: list[float] | None = None
+     file: dict[str, Any] | None = None
+     id: str | None = None
+     result_id: UUID | None = None
+     score: float | None = None
+
+
+ class DeepsetSearchResponse(BaseModel):
+     """Model representing a single search result."""
+
+     debug: dict[str, Any] | None = Field(default=None, alias="_debug")
+     answers: list[DeepsetAnswer] = Field(default_factory=list)
+     documents: list[DeepsetDocument] = Field(default_factory=list)
+     prompts: dict[str, str] | None = None
+     query: str | None = None
+     query_id: UUID | None = None
+
+     @model_validator(mode="before")
+     @classmethod
+     def normalize_response(cls, data: dict[str, Any]) -> dict[str, Any]:
+         """Normalize the response from the search and search-stream endpoints.
+
+         The search endpoint returns a list of results, but we only ever use the first result.
+         We are not sending batch queries, so there will never be more than one result.
+         We use this validator to transform the data so that we can use the same response model for search and
+         search-stream endpoints.
+         """
+         # Handle non-stream format with 'results' array
+         if "results" in data and isinstance(data["results"], list):
+             if len(data["results"]) > 0:
+                 first_result = data["results"][
+                     0
+                 ]  # we only ever care for the first result as we don't use batch queries
+                 normalized = {
+                     "query_id": data.get("query_id", first_result.get("query_id")),
+                     "query": first_result.get("query"),
+                     "answers": first_result.get("answers", []),
+                     "documents": first_result.get("documents", []),
+                     "prompts": first_result.get("prompts"),
+                     "_debug": first_result.get("_debug") or first_result.get("debug"),
+                 }
+                 return normalized
+             else:
+                 return {}
+         else:
+             return data
+
+
+ class StreamDelta(BaseModel):
+     """Model representing a streaming delta."""
+
+     text: str
+     meta: dict[str, Any] | None = None
+
+
+ class DeepsetStreamEvent(BaseModel):
+     """Model representing a stream event."""
+
+     query_id: str | UUID | None = None
+     type: str  # "delta", "result", or "error"
+     delta: StreamDelta | None = None
+     result: DeepsetSearchResponse | None = None
+     error: str | None = None
+
+
+ class PipelineList(BaseModel):
+     """Response model for listing pipelines."""
+
+     data: list[DeepsetPipeline]
+     has_more: bool
+     total: int
+
+
+ class PipelineValidationResultWithYaml(BaseModel):
+     """Model for pipeline validation result that includes the original YAML."""
+
+     validation_result: PipelineValidationResult
+     yaml_config: str
+
+
+ class PipelineOperationWithErrors(BaseModel):
+     """Model for pipeline operations that complete with validation errors."""
+
+     message: str
+     validation_result: PipelineValidationResult
+     pipeline: DeepsetPipeline
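The normalize_response validator is what lets one model parse both the batch-style "results" payload of the search endpoint and the already-flat payload of the search-stream endpoint. An illustration with invented data:

from deepset_mcp.api.pipeline.models import DeepsetSearchResponse

# Invented payload in the non-stream "results" shape.
raw = {
    "query_id": "00000000-0000-0000-0000-000000000001",
    "results": [
        {
            "query": "What is deepset?",
            "answers": [{"answer": "An AI platform."}],
            "documents": [],
        }
    ],
}

response = DeepsetSearchResponse.model_validate(raw)
assert response.query == "What is deepset?"
assert response.answers[0].answer == "An AI platform."

# A flat (stream-style) payload passes through the validator unchanged.
flat = DeepsetSearchResponse.model_validate({"query": "hi", "answers": [], "documents": []})
assert flat.query == "hi"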
deepset_mcp/api/pipeline/protocols.py
@@ -0,0 +1,83 @@
+ from collections.abc import AsyncIterator
+ from typing import Any, Protocol
+
+ from deepset_mcp.api.pipeline.log_level import LogLevel
+ from deepset_mcp.api.pipeline.models import (
+     DeepsetPipeline,
+     DeepsetSearchResponse,
+     DeepsetStreamEvent,
+     PipelineList,
+     PipelineLogList,
+     PipelineValidationResult,
+ )
+ from deepset_mcp.api.shared_models import NoContentResponse
+
+
+ class PipelineResourceProtocol(Protocol):
+     """Protocol defining the implementation for PipelineResource."""
+
+     async def validate(self, yaml_config: str) -> PipelineValidationResult:
+         """Validate a pipeline's YAML configuration against the API."""
+         ...
+
+     async def get(self, pipeline_name: str, include_yaml: bool = True) -> DeepsetPipeline:
+         """Fetch a single pipeline by its name."""
+         ...
+
+     async def list(
+         self,
+         page_number: int = 1,
+         limit: int = 10,
+     ) -> PipelineList:
+         """List pipelines in the configured workspace with optional pagination."""
+         ...
+
+     async def create(self, name: str, yaml_config: str) -> NoContentResponse:
+         """Create a new pipeline with a name and YAML config."""
+         ...
+
+     async def update(
+         self,
+         pipeline_name: str,
+         updated_pipeline_name: str | None = None,
+         yaml_config: str | None = None,
+     ) -> NoContentResponse:
+         """Update name and/or YAML config of an existing pipeline."""
+         ...
+
+     async def get_logs(
+         self,
+         pipeline_name: str,
+         limit: int = 30,
+         level: LogLevel | None = None,
+     ) -> PipelineLogList:
+         """Fetch logs for a specific pipeline."""
+         ...
+
+     async def deploy(self, pipeline_name: str) -> PipelineValidationResult:
+         """Deploy a pipeline."""
+         ...
+
+     async def search(
+         self,
+         pipeline_name: str,
+         query: str,
+         debug: bool = False,
+         view_prompts: bool = False,
+         params: dict[str, Any] | None = None,
+         filters: dict[str, Any] | None = None,
+     ) -> DeepsetSearchResponse:
+         """Search using a pipeline."""
+         ...
+
+     def search_stream(
+         self,
+         pipeline_name: str,
+         query: str,
+         debug: bool = False,
+         view_prompts: bool = False,
+         params: dict[str, Any] | None = None,
+         filters: dict[str, Any] | None = None,
+     ) -> AsyncIterator[DeepsetStreamEvent]:
+         """Search using a pipeline with response streaming."""
+         ...
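Note that search_stream is declared as a plain (non-async) method returning an AsyncIterator, which is exactly the signature an async-generator implementation exposes. A consumption sketch against any object satisfying the protocol (pipeline name and query are placeholders):

from deepset_mcp.api.pipeline.protocols import PipelineResourceProtocol


async def print_stream(resource: PipelineResourceProtocol) -> None:
    async for event in resource.search_stream("my-pipeline", query="What is deepset?"):
        if event.type == "delta" and event.delta is not None:
            print(event.delta.text, end="", flush=True)
        elif event.type == "error":
            print(f"\nStream error: {event.error}")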