deepset-mcp 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. deepset_mcp/__init__.py +0 -0
  2. deepset_mcp/agents/__init__.py +0 -0
  3. deepset_mcp/agents/debugging/__init__.py +0 -0
  4. deepset_mcp/agents/debugging/debugging_agent.py +37 -0
  5. deepset_mcp/agents/debugging/system_prompt.md +214 -0
  6. deepset_mcp/agents/generalist/__init__.py +0 -0
  7. deepset_mcp/agents/generalist/generalist_agent.py +38 -0
  8. deepset_mcp/agents/generalist/system_prompt.md +241 -0
  9. deepset_mcp/api/README.md +536 -0
  10. deepset_mcp/api/__init__.py +0 -0
  11. deepset_mcp/api/client.py +277 -0
  12. deepset_mcp/api/custom_components/__init__.py +0 -0
  13. deepset_mcp/api/custom_components/models.py +25 -0
  14. deepset_mcp/api/custom_components/protocols.py +17 -0
  15. deepset_mcp/api/custom_components/resource.py +56 -0
  16. deepset_mcp/api/exceptions.py +70 -0
  17. deepset_mcp/api/haystack_service/__init__.py +0 -0
  18. deepset_mcp/api/haystack_service/protocols.py +13 -0
  19. deepset_mcp/api/haystack_service/resource.py +55 -0
  20. deepset_mcp/api/indexes/__init__.py +0 -0
  21. deepset_mcp/api/indexes/models.py +63 -0
  22. deepset_mcp/api/indexes/protocols.py +53 -0
  23. deepset_mcp/api/indexes/resource.py +138 -0
  24. deepset_mcp/api/integrations/__init__.py +1 -0
  25. deepset_mcp/api/integrations/models.py +49 -0
  26. deepset_mcp/api/integrations/protocols.py +27 -0
  27. deepset_mcp/api/integrations/resource.py +57 -0
  28. deepset_mcp/api/pipeline/__init__.py +17 -0
  29. deepset_mcp/api/pipeline/log_level.py +9 -0
  30. deepset_mcp/api/pipeline/models.py +235 -0
  31. deepset_mcp/api/pipeline/protocols.py +83 -0
  32. deepset_mcp/api/pipeline/resource.py +378 -0
  33. deepset_mcp/api/pipeline_template/__init__.py +0 -0
  34. deepset_mcp/api/pipeline_template/models.py +56 -0
  35. deepset_mcp/api/pipeline_template/protocols.py +17 -0
  36. deepset_mcp/api/pipeline_template/resource.py +88 -0
  37. deepset_mcp/api/protocols.py +122 -0
  38. deepset_mcp/api/secrets/__init__.py +0 -0
  39. deepset_mcp/api/secrets/models.py +16 -0
  40. deepset_mcp/api/secrets/protocols.py +29 -0
  41. deepset_mcp/api/secrets/resource.py +112 -0
  42. deepset_mcp/api/shared_models.py +17 -0
  43. deepset_mcp/api/transport.py +336 -0
  44. deepset_mcp/api/user/__init__.py +0 -0
  45. deepset_mcp/api/user/protocols.py +11 -0
  46. deepset_mcp/api/user/resource.py +38 -0
  47. deepset_mcp/api/workspace/__init__.py +7 -0
  48. deepset_mcp/api/workspace/models.py +23 -0
  49. deepset_mcp/api/workspace/protocols.py +41 -0
  50. deepset_mcp/api/workspace/resource.py +94 -0
  51. deepset_mcp/benchmark/README.md +425 -0
  52. deepset_mcp/benchmark/__init__.py +1 -0
  53. deepset_mcp/benchmark/agent_configs/debugging_agent.yml +10 -0
  54. deepset_mcp/benchmark/agent_configs/generalist_agent.yml +6 -0
  55. deepset_mcp/benchmark/dp_validation_error_analysis/__init__.py +0 -0
  56. deepset_mcp/benchmark/dp_validation_error_analysis/eda.ipynb +757 -0
  57. deepset_mcp/benchmark/dp_validation_error_analysis/prepare_interaction_data.ipynb +167 -0
  58. deepset_mcp/benchmark/dp_validation_error_analysis/preprocessing_utils.py +213 -0
  59. deepset_mcp/benchmark/runner/__init__.py +0 -0
  60. deepset_mcp/benchmark/runner/agent_benchmark_runner.py +561 -0
  61. deepset_mcp/benchmark/runner/agent_loader.py +110 -0
  62. deepset_mcp/benchmark/runner/cli.py +39 -0
  63. deepset_mcp/benchmark/runner/cli_agent.py +373 -0
  64. deepset_mcp/benchmark/runner/cli_index.py +71 -0
  65. deepset_mcp/benchmark/runner/cli_pipeline.py +73 -0
  66. deepset_mcp/benchmark/runner/cli_tests.py +226 -0
  67. deepset_mcp/benchmark/runner/cli_utils.py +61 -0
  68. deepset_mcp/benchmark/runner/config.py +73 -0
  69. deepset_mcp/benchmark/runner/config_loader.py +64 -0
  70. deepset_mcp/benchmark/runner/interactive.py +140 -0
  71. deepset_mcp/benchmark/runner/models.py +203 -0
  72. deepset_mcp/benchmark/runner/repl.py +67 -0
  73. deepset_mcp/benchmark/runner/setup_actions.py +238 -0
  74. deepset_mcp/benchmark/runner/streaming.py +360 -0
  75. deepset_mcp/benchmark/runner/teardown_actions.py +196 -0
  76. deepset_mcp/benchmark/runner/tracing.py +21 -0
  77. deepset_mcp/benchmark/tasks/chat_rag_answers_wrong_format.yml +16 -0
  78. deepset_mcp/benchmark/tasks/documents_output_wrong.yml +13 -0
  79. deepset_mcp/benchmark/tasks/jinja_str_instead_of_complex_type.yml +11 -0
  80. deepset_mcp/benchmark/tasks/jinja_syntax_error.yml +11 -0
  81. deepset_mcp/benchmark/tasks/missing_output_mapping.yml +14 -0
  82. deepset_mcp/benchmark/tasks/no_query_input.yml +13 -0
  83. deepset_mcp/benchmark/tasks/pipelines/chat_agent_jinja_str.yml +141 -0
  84. deepset_mcp/benchmark/tasks/pipelines/chat_agent_jinja_syntax.yml +141 -0
  85. deepset_mcp/benchmark/tasks/pipelines/chat_rag_answers_wrong_format.yml +181 -0
  86. deepset_mcp/benchmark/tasks/pipelines/chat_rag_missing_output_mapping.yml +189 -0
  87. deepset_mcp/benchmark/tasks/pipelines/rag_documents_wrong_format.yml +193 -0
  88. deepset_mcp/benchmark/tasks/pipelines/rag_no_query_input.yml +191 -0
  89. deepset_mcp/benchmark/tasks/pipelines/standard_index.yml +167 -0
  90. deepset_mcp/initialize_embedding_model.py +12 -0
  91. deepset_mcp/main.py +133 -0
  92. deepset_mcp/prompts/deepset_copilot_prompt.md +271 -0
  93. deepset_mcp/prompts/deepset_debugging_agent.md +214 -0
  94. deepset_mcp/store.py +5 -0
  95. deepset_mcp/tool_factory.py +473 -0
  96. deepset_mcp/tools/__init__.py +0 -0
  97. deepset_mcp/tools/custom_components.py +52 -0
  98. deepset_mcp/tools/doc_search.py +83 -0
  99. deepset_mcp/tools/haystack_service.py +358 -0
  100. deepset_mcp/tools/haystack_service_models.py +97 -0
  101. deepset_mcp/tools/indexes.py +129 -0
  102. deepset_mcp/tools/model_protocol.py +16 -0
  103. deepset_mcp/tools/pipeline.py +335 -0
  104. deepset_mcp/tools/pipeline_template.py +116 -0
  105. deepset_mcp/tools/secrets.py +45 -0
  106. deepset_mcp/tools/tokonomics/__init__.py +73 -0
  107. deepset_mcp/tools/tokonomics/decorators.py +396 -0
  108. deepset_mcp/tools/tokonomics/explorer.py +347 -0
  109. deepset_mcp/tools/tokonomics/object_store.py +177 -0
  110. deepset_mcp/tools/workspace.py +61 -0
  111. deepset_mcp-0.0.2.dist-info/METADATA +288 -0
  112. deepset_mcp-0.0.2.dist-info/RECORD +114 -0
  113. deepset_mcp-0.0.2.dist-info/WHEEL +4 -0
  114. deepset_mcp-0.0.2.dist-info/entry_points.txt +3 -0
deepset_mcp/api/pipeline/resource.py
@@ -0,0 +1,378 @@
+ import json
+ import logging
+ from collections.abc import AsyncIterator
+ from typing import TYPE_CHECKING, Any
+
+ from deepset_mcp.api.exceptions import UnexpectedAPIError
+ from deepset_mcp.api.pipeline.log_level import LogLevel
+ from deepset_mcp.api.pipeline.models import (
+     DeepsetPipeline,
+     DeepsetSearchResponse,
+     DeepsetStreamEvent,
+     PipelineList,
+     PipelineLogList,
+     PipelineValidationResult,
+     ValidationError,
+ )
+ from deepset_mcp.api.pipeline.protocols import PipelineResourceProtocol
+ from deepset_mcp.api.shared_models import NoContentResponse
+ from deepset_mcp.api.transport import raise_for_status
+
+ logger = logging.getLogger(__name__)
+
+ if TYPE_CHECKING:
+     from deepset_mcp.api.protocols import AsyncClientProtocol
+
+
+ class PipelineResource(PipelineResourceProtocol):
+     """Manages interactions with the deepset pipeline API."""
+
+     def __init__(
+         self,
+         client: "AsyncClientProtocol",
+         workspace: str,
+     ) -> None:
+         """Initializes a PipelineResource instance.
+
+         :param client: The async client protocol instance.
+         :param workspace: The workspace identifier.
+         """
+         self._client = client
+         self._workspace = workspace
+
+     async def validate(self, yaml_config: str) -> PipelineValidationResult:
+         """Validate a pipeline's YAML configuration against the API.
+
+         :param yaml_config: The YAML configuration string to validate.
+         :returns: PipelineValidationResult containing validation status and any errors.
+         :raises UnexpectedAPIError: If the API returns an unexpected status code.
+         """
+         data = {"query_yaml": yaml_config}
+
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipeline_validations",
+             method="POST",
+             data=data,
+         )
+
+         # If successful (status 200), the YAML is valid
+         if resp.success:
+             return PipelineValidationResult(valid=True)
+
+         if resp.status_code == 400 and resp.json is not None and isinstance(resp.json, dict) and "details" in resp.json:
+             errors = [ValidationError(code=error["code"], message=error["message"]) for error in resp.json["details"]]
+
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         if resp.status_code == 422:
+             errors = [ValidationError(code="YAML_ERROR", message="Syntax error in YAML")]
+
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         raise UnexpectedAPIError(status_code=resp.status_code, message=resp.text, detail=resp.json)
+
+     async def list(
+         self,
+         page_number: int = 1,
+         limit: int = 10,
+     ) -> PipelineList:
+         """Retrieve pipelines in the configured workspace with optional pagination.
+
+         :param page_number: Page number for paging.
+         :param limit: Max number of items to return.
+         :returns: PipelineList with pipelines and metadata.
+         """
+         params: dict[str, Any] = {
+             "page_number": page_number,
+             "limit": limit,
+         }
+
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines",
+             method="GET",
+             params=params,
+         )
+
+         raise_for_status(resp)
+
+         response = resp.json
+
+         if response is not None:
+             pipelines = [DeepsetPipeline.model_validate(item) for item in response.get("data", [])]
+             return PipelineList(
+                 data=pipelines,
+                 has_more=response.get("has_more", False),
+                 total=response.get("total", len(pipelines)),
+             )
+         else:
+             return PipelineList(data=[], has_more=False, total=0)
+
+     async def get(self, pipeline_name: str, include_yaml: bool = True) -> DeepsetPipeline:
+         """Fetch a single pipeline by its name.
+
+         :param pipeline_name: Name of the pipeline to fetch.
+         :param include_yaml: Whether to include YAML configuration in the response.
+         :returns: DeepsetPipeline instance.
+         """
+         resp = await self._client.request(endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}")
+         raise_for_status(resp)
+
+         pipeline = DeepsetPipeline.model_validate(resp.json)
+
+         if include_yaml:
+             yaml_response = await self._client.request(
+                 endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/yaml"
+             )
+
+             raise_for_status(yaml_response)
+
+             if yaml_response.json is not None:
+                 pipeline.yaml_config = yaml_response.json["query_yaml"]
+
+         return pipeline
+
+     async def create(self, name: str, yaml_config: str) -> NoContentResponse:
+         """Create a new pipeline with a name and YAML config.
+
+         :param name: Name of the new pipeline.
+         :param yaml_config: YAML configuration for the pipeline.
+         :returns: NoContentResponse indicating successful creation.
+         """
+         data = {"name": name, "query_yaml": yaml_config}
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines",
+             method="POST",
+             data=data,
+         )
+
+         raise_for_status(resp)
+
+         return NoContentResponse(message="Pipeline created successfully.")
+
+     async def update(
+         self,
+         pipeline_name: str,
+         updated_pipeline_name: str | None = None,
+         yaml_config: str | None = None,
+     ) -> NoContentResponse:
+         """Update name and/or YAML config of an existing pipeline.
+
+         :param pipeline_name: Current name of the pipeline.
+         :param updated_pipeline_name: New name for the pipeline (optional).
+         :param yaml_config: New YAML configuration (optional).
+         :returns: NoContentResponse indicating successful update.
+         :raises ValueError: If neither updated_pipeline_name nor yaml_config is provided.
+         """
+         # Handle the name update first, if requested
+         if updated_pipeline_name is not None:
+             name_resp = await self._client.request(
+                 endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}",
+                 method="PATCH",
+                 data={"name": updated_pipeline_name},
+             )
+
+             raise_for_status(name_resp)
+
+             pipeline_name = updated_pipeline_name
+
+             if yaml_config is None:
+                 return NoContentResponse(message="Pipeline name updated successfully.")
+
+         if yaml_config is not None:
+             yaml_resp = await self._client.request(
+                 endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/yaml",
+                 method="PUT",
+                 data={"query_yaml": yaml_config},
+             )
+
+             raise_for_status(yaml_resp)
+
+             if updated_pipeline_name is not None:
+                 response = NoContentResponse(message="Pipeline name and YAML updated successfully.")
+             else:
+                 response = NoContentResponse(message="Pipeline YAML updated successfully.")
+
+             return response
+
+         raise ValueError("Either `updated_pipeline_name` or `yaml_config` must be provided.")
+
+     async def get_logs(
+         self,
+         pipeline_name: str,
+         limit: int = 30,
+         level: LogLevel | None = None,
+     ) -> PipelineLogList:
+         """Fetch logs for a specific pipeline.
+
+         :param pipeline_name: Name of the pipeline to fetch logs for.
+         :param limit: Maximum number of log entries to return.
+         :param level: Filter logs by level. If None, returns all levels.
+         :returns: A PipelineLogList containing the log entries.
+         """
+         params: dict[str, Any] = {
+             "limit": limit,
+             "filter": "origin eq 'querypipeline'",
+         }
+
+         # Add level filter if specified
+         if level is not None:
+             params["filter"] = f"level eq '{level}' and origin eq 'querypipeline'"
+
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/logs",
+             method="GET",
+             params=params,
+         )
+
+         raise_for_status(resp)
+
+         if resp.json is not None:
+             return PipelineLogList.model_validate(resp.json)
+         else:
+             # Return empty log list if no response
+             return PipelineLogList(data=[], has_more=False, total=0)
+
+     async def deploy(self, pipeline_name: str) -> PipelineValidationResult:
+         """Deploy a pipeline to production.
+
+         :param pipeline_name: Name of the pipeline to deploy.
+         :returns: PipelineValidationResult containing deployment status and any errors.
+         :raises UnexpectedAPIError: If the API returns an unexpected status code.
+         """
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/deploy",
+             method="POST",
+         )
+
+         # If successful (status 200), the deployment was successful
+         if resp.success:
+             return PipelineValidationResult(valid=True)
+
+         # Handle validation errors (422)
+         if resp.status_code == 422 and resp.json is not None and isinstance(resp.json, dict) and "details" in resp.json:
+             errors = [ValidationError(code=error["code"], message=error["message"]) for error in resp.json["details"]]
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         # Handle other 4xx errors (400, 404)
+         if 400 <= resp.status_code < 500:
+             # For non-validation errors, create a generic error
+             error_message = resp.text if resp.text else f"HTTP {resp.status_code} error"
+             errors = [ValidationError(code="DEPLOYMENT_ERROR", message=error_message)]
+             return PipelineValidationResult(valid=False, errors=errors)
+
+         raise UnexpectedAPIError(status_code=resp.status_code, message=resp.text, detail=resp.json)
+
+     async def delete(self, pipeline_name: str) -> NoContentResponse:
+         """Delete a pipeline.
+
+         :param pipeline_name: Name of the pipeline to delete.
+         :returns: NoContentResponse indicating successful deletion.
+         :raises UnexpectedAPIError: If the API returns an unexpected status code.
+         """
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}",
+             method="DELETE",
+         )
+
+         raise_for_status(resp)
+
+         return NoContentResponse(message="Pipeline deleted successfully.")
+
+     async def search(
+         self,
+         pipeline_name: str,
+         query: str,
+         debug: bool = False,
+         view_prompts: bool = False,
+         params: dict[str, Any] | None = None,
+         filters: dict[str, Any] | None = None,
+     ) -> DeepsetSearchResponse:
+         """Search using a pipeline.
+
+         :param pipeline_name: Name of the pipeline to use for search.
+         :param query: Search query.
+         :param debug: Whether to include debug information.
+         :param view_prompts: Whether to include prompts in the response.
+         :param params: Additional parameters for pipeline components.
+         :param filters: Search filters to apply.
+         :returns: DeepsetSearchResponse containing search results.
+         """
+         # Prepare request data
+         data: dict[str, Any] = {
+             "queries": [query],  # API expects a list but we only send one query
+             "debug": debug,
+             "view_prompts": view_prompts,
+         }
+
+         if params:
+             data["params"] = params
+
+         if filters:
+             data["filters"] = filters
+
+         resp = await self._client.request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/search",
+             method="POST",
+             data=data,
+             response_type=dict[str, Any],
+             timeout=180.0,
+         )
+
+         raise_for_status(resp)
+
+         if resp.json is not None:
+             return DeepsetSearchResponse.model_validate(resp.json)
+         else:
+             # Return empty response if no JSON data
+             return DeepsetSearchResponse()
+
+     async def search_stream(
+         self,
+         pipeline_name: str,
+         query: str,
+         debug: bool = False,
+         view_prompts: bool = False,
+         params: dict[str, Any] | None = None,
+         filters: dict[str, Any] | None = None,
+     ) -> AsyncIterator[DeepsetStreamEvent]:
+         """Search using a pipeline with response streaming.
+
+         :param pipeline_name: Name of the pipeline to use for search.
+         :param query: Search query.
+         :param debug: Whether to include debug information.
+         :param view_prompts: Whether to include prompts in the response.
+         :param params: Additional parameters for pipeline components.
+         :param filters: Search filters to apply.
+         :returns: AsyncIterator streaming the result.
+         """
+         # For streaming, the include_result flag is added to the request
+         # Prepare request data
+         data: dict[str, Any] = {
+             "query": query,
+             "debug": debug,
+             "view_prompts": view_prompts,
+             "include_result": True,
+         }
+
+         if params:
+             data["params"] = params
+
+         if filters:
+             data["filters"] = filters
+
+         async with self._client.stream_request(
+             endpoint=f"v1/workspaces/{self._workspace}/pipelines/{pipeline_name}/search-stream",
+             method="POST",
+             data=data,
+         ) as resp:
+             async for line in resp.iter_lines():
+                 try:
+                     event_dict = json.loads(line)
+                     event = DeepsetStreamEvent.model_validate(event_dict)
+
+                     if event.error is not None:
+                         raise UnexpectedAPIError(message=event.error)
+                     yield event
+                 except (json.JSONDecodeError, ValueError):
+                     # Skip malformed events
+                     continue
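
The validate, create, and deploy methods above are designed to be chained. A minimal usage sketch, written against the `AsyncClientProtocol` defined later in this diff; how the concrete client is constructed and authenticated is outside this hunk, and the `errors or []` guard assumes `PipelineValidationResult.errors` may be `None`, which `pipeline/models.py` (not shown here) would confirm:

```python
from deepset_mcp.api.protocols import AsyncClientProtocol


async def validate_and_deploy(client: AsyncClientProtocol, workspace: str, name: str, yaml_config: str) -> None:
    """Validate a YAML config, create the pipeline, then deploy it."""
    pipelines = client.pipelines(workspace)

    # validate() turns 400/422 responses into a PipelineValidationResult instead of raising.
    result = await pipelines.validate(yaml_config)
    if not result.valid:
        for error in result.errors or []:
            print(f"{error.code}: {error.message}")
        return

    await pipelines.create(name=name, yaml_config=yaml_config)

    # deploy() reports deployment problems the same way validate() does.
    deployment = await pipelines.deploy(name)
    if not deployment.valid:
        for error in deployment.errors or []:
            print(f"{error.code}: {error.message}")
```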
deepset_mcp/api/pipeline_template/__init__.py: File without changes
deepset_mcp/api/pipeline_template/models.py
@@ -0,0 +1,56 @@
+ from enum import StrEnum
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field
+
+
+ class PipelineType(StrEnum):
+     """Enum representing the type of a pipeline template."""
+
+     QUERY = "query"
+     INDEXING = "indexing"
+
+
+ class PipelineTemplateTag(BaseModel):
+     """Model representing a tag on a pipeline template."""
+
+     name: str
+     tag_id: UUID
+
+
+ class PipelineTemplate(BaseModel):
+     """Model representing a pipeline template."""
+
+     author: str
+     best_for: list[str]
+     description: str
+     template_name: str = Field(alias="pipeline_name")
+     display_name: str = Field(alias="name")
+     pipeline_template_id: UUID = Field(alias="pipeline_template_id")
+     potential_applications: list[str] = Field(alias="potential_applications")
+     yaml_config: str | None = Field(None, alias="query_yaml")
+     tags: list[PipelineTemplateTag]
+     pipeline_type: PipelineType
+
+
+ class PipelineTemplateList(BaseModel):
+     """Response model for listing pipeline templates."""
+
+     data: list[PipelineTemplate]
+     has_more: bool
+     total: int
+
+
+ class PipelineTemplateSearchResult(BaseModel):
+     """Model representing a search result for pipeline templates."""
+
+     template: PipelineTemplate
+     similarity_score: float
+
+
+ class PipelineTemplateSearchResults(BaseModel):
+     """Response model for pipeline template search results."""
+
+     results: list[PipelineTemplateSearchResult]
+     query: str
+     total_found: int
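
Because `PipelineTemplate` reads the API field names through Pydantic aliases, validation expects the aliased keys. An illustrative sketch with made-up values; the payload shape is inferred from the aliases above, not taken from the deepset API itself:

```python
from deepset_mcp.api.pipeline_template.models import PipelineTemplate

# Illustrative payload using the aliased (API-side) field names; all values are invented.
payload = {
    "author": "deepset",
    "best_for": ["Retrieval augmented generation"],
    "description": "A RAG template.",
    "pipeline_name": "rag-qa",  # populates template_name
    "name": "RAG Question Answering",  # populates display_name
    "pipeline_template_id": "00000000-0000-0000-0000-000000000001",
    "potential_applications": ["question answering"],
    "query_yaml": "components: {}",  # optional; populates yaml_config
    "tags": [{"name": "recommended", "tag_id": "00000000-0000-0000-0000-000000000002"}],
    "pipeline_type": "query",
}

template = PipelineTemplate.model_validate(payload)
assert template.template_name == "rag-qa"
assert template.pipeline_type == "query"  # PipelineType is a StrEnum, so string comparison works
```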
deepset_mcp/api/pipeline_template/protocols.py
@@ -0,0 +1,17 @@
+ from typing import Protocol
+
+ from deepset_mcp.api.pipeline_template.models import PipelineTemplate, PipelineTemplateList
+
+
+ class PipelineTemplateResourceProtocol(Protocol):
+     """Protocol defining the implementation for PipelineTemplateResource."""
+
+     async def get_template(self, template_name: str) -> PipelineTemplate:
+         """Fetch a single pipeline template by its name."""
+         ...
+
+     async def list_templates(
+         self, limit: int = 100, field: str = "created_at", order: str = "DESC", filter: str | None = None
+     ) -> PipelineTemplateList:
+         """List pipeline templates in the configured workspace."""
+         ...
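
Because this is a `typing.Protocol`, any object with structurally matching methods satisfies it. As a sketch, a hypothetical in-memory fake (not part of this package) can stand in for the real resource in tests:

```python
from deepset_mcp.api.pipeline_template.models import PipelineTemplate, PipelineTemplateList


class InMemoryPipelineTemplateResource:
    """Test double that structurally satisfies PipelineTemplateResourceProtocol."""

    def __init__(self, templates: dict[str, PipelineTemplate]) -> None:
        self._templates = templates

    async def get_template(self, template_name: str) -> PipelineTemplate:
        return self._templates[template_name]

    async def list_templates(
        self, limit: int = 100, field: str = "created_at", order: str = "DESC", filter: str | None = None
    ) -> PipelineTemplateList:
        # Sorting and filtering are intentionally omitted in this fake.
        data = list(self._templates.values())[:limit]
        return PipelineTemplateList(data=data, has_more=False, total=len(data))
```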
deepset_mcp/api/pipeline_template/resource.py
@@ -0,0 +1,88 @@
+ from typing import Any
+
+ from deepset_mcp.api.exceptions import UnexpectedAPIError
+ from deepset_mcp.api.pipeline_template.models import PipelineTemplate, PipelineTemplateList
+ from deepset_mcp.api.pipeline_template.protocols import PipelineTemplateResourceProtocol
+ from deepset_mcp.api.protocols import AsyncClientProtocol
+ from deepset_mcp.api.transport import raise_for_status
+
+
+ class PipelineTemplateResource(PipelineTemplateResourceProtocol):
+     """Resource for interacting with pipeline templates in a workspace."""
+
+     def __init__(self, client: AsyncClientProtocol, workspace: str) -> None:
+         """Initialize the pipeline template resource.
+
+         Parameters
+         ----------
+         client : AsyncClientProtocol
+             Client to use for making API requests
+         workspace : str
+             Workspace to operate in
+         """
+         self._client = client
+         self._workspace = workspace
+
+     async def get_template(self, template_name: str) -> PipelineTemplate:
+         """Fetch a single pipeline template by its name.
+
+         Parameters
+         ----------
+         template_name : str
+             Name of the template to fetch
+
+         Returns
+         -------
+         PipelineTemplate
+             The requested pipeline template
+         """
+         response = await self._client.request(f"/v1/workspaces/{self._workspace}/pipeline_templates/{template_name}")
+         raise_for_status(response)
+         data = response.json
+
+         return PipelineTemplate.model_validate(data)
+
+     async def list_templates(
+         self, limit: int = 100, field: str = "created_at", order: str = "DESC", filter: str | None = None
+     ) -> PipelineTemplateList:
+         """List pipeline templates in the configured workspace.
+
+         Parameters
+         ----------
+         limit : int, optional (default=100)
+             Maximum number of templates to return
+         field : str, optional (default="created_at")
+             Field to sort by
+         order : str, optional (default="DESC")
+             Sort order (ASC or DESC)
+         filter : str | None, optional (default=None)
+             OData filter expression for filtering templates
+
+         Returns
+         -------
+         PipelineTemplateList
+             List of pipeline templates with metadata
+         """
+         params = {"limit": limit, "page_number": 1, "field": field, "order": order}
+
+         if filter is not None:
+             params["filter"] = filter
+
+         response = await self._client.request(
+             f"/v1/workspaces/{self._workspace}/pipeline_templates",
+             method="GET",
+             params=params,
+         )
+
+         raise_for_status(response)
+
+         if response.json is None:
+             raise UnexpectedAPIError(message="Unexpected API response, no templates returned.")
+
+         response_data: dict[str, Any] = response.json
+
+         return PipelineTemplateList(
+             data=[PipelineTemplate.model_validate(template) for template in response_data["data"]],
+             has_more=response_data.get("has_more", False),
+             total=response_data.get("total", len(response_data["data"])),
+         )
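
A short usage sketch for `list_templates`, reached through the client protocol's `pipeline_templates()` accessor. The concrete filter string is an assumption modeled on the OData-style filter used for pipeline logs earlier in this diff ("origin eq 'querypipeline'"), not a documented expression:

```python
from deepset_mcp.api.protocols import AsyncClientProtocol


async def newest_query_templates(client: AsyncClientProtocol, workspace: str) -> list[str]:
    """Return the display names of recently created query templates."""
    templates = client.pipeline_templates(workspace)

    result = await templates.list_templates(
        limit=20,
        field="created_at",
        order="DESC",
        filter="pipeline_type eq 'query'",  # assumed OData-style expression
    )
    return [template.display_name for template in result.data]
```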
deepset_mcp/api/protocols.py
@@ -0,0 +1,122 @@
+ from contextlib import AbstractAsyncContextManager
+ from types import TracebackType
+ from typing import Any, Literal, Protocol, Self, TypeVar, overload
+
+ from deepset_mcp.api.custom_components.protocols import CustomComponentsProtocol
+ from deepset_mcp.api.haystack_service.protocols import HaystackServiceProtocol
+ from deepset_mcp.api.indexes.protocols import IndexResourceProtocol
+ from deepset_mcp.api.integrations.protocols import IntegrationResourceProtocol
+ from deepset_mcp.api.pipeline.protocols import PipelineResourceProtocol
+ from deepset_mcp.api.pipeline_template.protocols import PipelineTemplateResourceProtocol
+ from deepset_mcp.api.secrets.protocols import SecretResourceProtocol
+ from deepset_mcp.api.transport import StreamingResponse, TransportResponse
+ from deepset_mcp.api.user.protocols import UserResourceProtocol
+ from deepset_mcp.api.workspace.protocols import WorkspaceResourceProtocol
+
+ T = TypeVar("T")
+
+
+ class AsyncClientProtocol(Protocol):
+     """Protocol defining the implementation for AsyncClient."""
+
+     @overload
+     async def request(
+         self,
+         endpoint: str,
+         *,
+         response_type: type[T],
+         method: str = "GET",
+         data: dict[str, Any] | None = None,
+         headers: dict[str, str] | None = None,
+         timeout: float | None | Literal["config"] = "config",
+         **kwargs: Any,
+     ) -> TransportResponse[T]: ...
+
+     @overload
+     async def request(
+         self,
+         endpoint: str,
+         *,
+         response_type: None = None,
+         method: str = "GET",
+         data: dict[str, Any] | None = None,
+         headers: dict[str, str] | None = None,
+         timeout: float | None | Literal["config"] = "config",
+         **kwargs: Any,
+     ) -> TransportResponse[Any]: ...
+
+     async def request(
+         self,
+         endpoint: str,
+         *,
+         response_type: type[T] | None = None,
+         method: str = "GET",
+         data: dict[str, Any] | None = None,
+         headers: dict[str, str] | None = None,
+         timeout: float | None | Literal["config"] = "config",
+         **kwargs: Any,
+     ) -> TransportResponse[Any]:
+         """Make a request to the API."""
+         ...
+
+     def stream_request(
+         self,
+         endpoint: str,
+         *,
+         method: str = "POST",
+         data: dict[str, Any] | None = None,
+         headers: dict[str, str] | None = None,
+         **kwargs: Any,
+     ) -> AbstractAsyncContextManager[StreamingResponse]:
+         """Make a streaming request to the API."""
+         ...
+
+     async def close(self) -> None:
+         """Close underlying transport resources."""
+         ...
+
+     async def __aenter__(self) -> Self:
+         """Enter the AsyncContextManager."""
+         ...
+
+     async def __aexit__(
+         self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+     ) -> bool:
+         """Exit the AsyncContextManager and clean up resources."""
+         ...
+
+     def pipelines(self, workspace: str) -> "PipelineResourceProtocol":
+         """Access pipelines in the specified workspace."""
+         ...
+
+     def haystack_service(self) -> "HaystackServiceProtocol":
+         """Access the Haystack service."""
+         ...
+
+     def pipeline_templates(self, workspace: str) -> "PipelineTemplateResourceProtocol":
+         """Access pipeline templates in the specified workspace."""
+         ...
+
+     def indexes(self, workspace: str) -> "IndexResourceProtocol":
+         """Access indexes in the specified workspace."""
+         ...
+
+     def custom_components(self, workspace: str) -> "CustomComponentsProtocol":
+         """Access custom components in the specified workspace."""
+         ...
+
+     def users(self) -> "UserResourceProtocol":
+         """Access users."""
+         ...
+
+     def secrets(self) -> "SecretResourceProtocol":
+         """Access secrets."""
+         ...
+
+     def workspaces(self) -> "WorkspaceResourceProtocol":
+         """Access workspaces."""
+         ...
+
+     def integrations(self) -> "IntegrationResourceProtocol":
+         """Access integrations."""
+         ...
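
The resource accessors above are how the streaming search in `pipeline/resource.py` is typically reached: `pipelines()` returns the resource, whose `search_stream()` wraps `stream_request()`. A minimal consumption sketch, assuming a client implementing `AsyncClientProtocol`; the fields of `DeepsetStreamEvent` beyond `error` live in `pipeline/models.py`, which this diff section does not show, so the event is printed as-is:

```python
from deepset_mcp.api.protocols import AsyncClientProtocol


async def stream_answer(client: AsyncClientProtocol, workspace: str, pipeline_name: str, query: str) -> None:
    """Print stream events as they arrive from a deployed pipeline."""
    pipelines = client.pipelines(workspace)

    # search_stream() yields DeepsetStreamEvent objects parsed from the line-delimited stream.
    async for event in pipelines.search_stream(pipeline_name=pipeline_name, query=query):
        print(event)
```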
deepset_mcp/api/secrets/__init__.py: File without changes
deepset_mcp/api/secrets/models.py
@@ -0,0 +1,16 @@
+ from pydantic import BaseModel
+
+
+ class Secret(BaseModel):
+     """Model representing a secret in deepset."""
+
+     name: str
+     secret_id: str
+
+
+ class SecretList(BaseModel):
+     """Model representing a list of secrets with pagination."""
+
+     data: list[Secret]
+     has_more: bool
+     total: int
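
A small validation sketch for these models. The payload shape is an assumption mirroring the `data`/`has_more`/`total` pattern used by the other list models in this package; the values are made up:

```python
from deepset_mcp.api.secrets.models import Secret, SecretList

# Assumed response shape with invented values.
payload = {
    "data": [{"name": "OPENAI_API_KEY", "secret_id": "sec_123"}],
    "has_more": False,
    "total": 1,
}

secrets = SecretList.model_validate(payload)
assert isinstance(secrets.data[0], Secret)
```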