codemie-sdk-python 0.1.52__py3-none-any.whl → 0.1.258__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (35)
  1. codemie_sdk/__init__.py +114 -2
  2. codemie_sdk/auth/credentials.py +5 -4
  3. codemie_sdk/client/client.py +66 -5
  4. codemie_sdk/models/__init__.py +0 -0
  5. codemie_sdk/models/assistant.py +137 -12
  6. codemie_sdk/models/conversation.py +169 -0
  7. codemie_sdk/models/datasource.py +81 -1
  8. codemie_sdk/models/file_operation.py +25 -0
  9. codemie_sdk/models/integration.py +23 -2
  10. codemie_sdk/models/vendor_assistant.py +187 -0
  11. codemie_sdk/models/vendor_guardrail.py +152 -0
  12. codemie_sdk/models/vendor_knowledgebase.py +151 -0
  13. codemie_sdk/models/vendor_workflow.py +145 -0
  14. codemie_sdk/models/workflow.py +4 -4
  15. codemie_sdk/models/workflow_execution_payload.py +21 -0
  16. codemie_sdk/models/workflow_state.py +6 -3
  17. codemie_sdk/models/workflow_thoughts.py +26 -0
  18. codemie_sdk/services/assistant.py +261 -3
  19. codemie_sdk/services/conversation.py +90 -0
  20. codemie_sdk/services/datasource.py +81 -6
  21. codemie_sdk/services/files.py +82 -0
  22. codemie_sdk/services/integration.py +21 -1
  23. codemie_sdk/services/vendor_assistant.py +364 -0
  24. codemie_sdk/services/vendor_guardrail.py +375 -0
  25. codemie_sdk/services/vendor_knowledgebase.py +270 -0
  26. codemie_sdk/services/vendor_workflow.py +330 -0
  27. codemie_sdk/services/webhook.py +41 -0
  28. codemie_sdk/services/workflow.py +26 -2
  29. codemie_sdk/services/workflow_execution.py +54 -6
  30. codemie_sdk/utils/http.py +43 -35
  31. codemie_sdk_python-0.1.258.dist-info/METADATA +1404 -0
  32. codemie_sdk_python-0.1.258.dist-info/RECORD +45 -0
  33. codemie_sdk_python-0.1.52.dist-info/METADATA +0 -809
  34. codemie_sdk_python-0.1.52.dist-info/RECORD +0 -29
  35. {codemie_sdk_python-0.1.52.dist-info → codemie_sdk_python-0.1.258.dist-info}/WHEEL +0 -0
@@ -1,10 +1,14 @@
  """Assistant service implementation."""

+ import inspect
  import json
  from pathlib import Path
  from typing import List, Union, Optional, Dict, Any, Literal
+ from pydantic import BaseModel
+ from copy import deepcopy

  import requests
+ import mimetypes

  from ..models.assistant import (
      Assistant,
@@ -16,6 +20,7 @@ from ..models.assistant import (
      AssistantBase,
      Context,
      ExportAssistantPayload,
+     AssistantEvaluationRequest,
  )
  from ..models.common import PaginationParams
  from ..utils import ApiRequestHandler
@@ -37,7 +42,7 @@ class AssistantService:
      def list(
          self,
          minimal_response: bool = True,
-         scope: Literal["visible_to_user", "created_by_user"] = "visible_to_user",
+         scope: Literal["visible_to_user", "marketplace"] = "visible_to_user",
          page: int = 0,
          per_page: int = 12,
          filters: Optional[Dict[str, Any]] = None,
@@ -165,33 +170,270 @@ class AssistantService:
          """
          return self._api.get(f"/v1/assistants/prebuilt/{slug}", Assistant)

+     def list_versions(
+         self, assistant_id: str, page: int = 0, per_page: Optional[int] = None
+     ):
+         """List assistant versions.
+
+         Args:
+             assistant_id: Assistant identifier
+             page: Page number for pagination
+             per_page: Items per page (optional). If not provided, backend defaults are used.
+
+         Returns:
+             List of AssistantVersion objects
+         """
+
+         params: Dict[str, Any] = {"page": page}
+         if per_page is not None:
+             params["per_page"] = per_page
+         from ..models.assistant import AssistantVersion
+
+         raw = self._api.get(
+             f"/v1/assistants/{assistant_id}/versions",
+             dict,
+             params=params,
+             wrap_response=False,
+         )
+         items = []
+         if isinstance(raw, list):
+             items = raw
+         elif isinstance(raw, dict):
+             items = raw.get("data") or raw.get("versions") or []
+         else:
+             items = []
+         return [AssistantVersion.model_validate(it) for it in items]
+
+     def get_version(self, assistant_id: str, version_number: int):
+         """Get a specific assistant version by number.
+
+         Args:
+             assistant_id: Assistant identifier
+             version_number: Version number to retrieve
+
+         Returns:
+             AssistantVersion object
+         """
+         from ..models.assistant import AssistantVersion
+
+         raw = self._api.get(
+             f"/v1/assistants/{assistant_id}/versions/{version_number}", AssistantVersion
+         )
+         if isinstance(raw, dict):
+             return AssistantVersion.model_validate(raw)
+         return raw
+
+     def compare_versions(self, assistant_id: str, v1: int, v2: int) -> Dict[str, Any]:
+         """Compare two assistant versions and return diff summary.
+
+         Args:
+             assistant_id: Assistant identifier
+             v1: First version number
+             v2: Second version number
+
+         Returns:
+             Generic dictionary with comparison result (diff, summary, etc.)
+         """
+         return self._api.get(
+             f"/v1/assistants/{assistant_id}/versions/{v1}/compare/{v2}",
+             dict,
+         )
+
+     def rollback_to_version(
+         self, assistant_id: str, version_number: int, change_notes: Optional[str] = None
+     ) -> dict:
+         """Rollback assistant to a specific version. Creates a new version mirroring target.
+
+         Args:
+             assistant_id: Assistant identifier
+             version_number: Target version to rollback to
+             change_notes: Optional description of why rollback is performed
+
+         Returns:
+             Backend response (dict)
+         """
+         payload: Dict[str, Any] = {}
+         if change_notes:
+             payload["change_notes"] = change_notes
+         try:
+             return self._api.post(
+                 f"/v1/assistants/{assistant_id}/versions/{version_number}/rollback",
+                 dict,
+                 json_data=payload,
+             )
+         except requests.HTTPError as err:
+             try:
+                 assistant = self.get(assistant_id)
+                 version = self.get_version(assistant_id, version_number)
+
+                 update_req = AssistantUpdateRequest(
+                     name=assistant.name,
+                     description=assistant.description or "",
+                     system_prompt=version.system_prompt,
+                     project=assistant.project,
+                     llm_model_type=version.llm_model_type or assistant.llm_model_type,
+                     temperature=version.temperature
+                     if hasattr(version, "temperature")
+                     else assistant.temperature,
+                     top_p=version.top_p
+                     if hasattr(version, "top_p")
+                     else assistant.top_p,
+                     context=version.context
+                     if hasattr(version, "context")
+                     else assistant.context,
+                     toolkits=version.toolkits
+                     if hasattr(version, "toolkits")
+                     else assistant.toolkits,
+                     user_prompts=assistant.user_prompts,
+                     shared=assistant.shared,
+                     is_react=assistant.is_react,
+                     is_global=assistant.is_global,
+                     slug=assistant.slug,
+                     mcp_servers=version.mcp_servers
+                     if hasattr(version, "mcp_servers")
+                     else assistant.mcp_servers,
+                     assistant_ids=version.assistant_ids
+                     if hasattr(version, "assistant_ids")
+                     else assistant.assistant_ids,
+                 )
+                 resp = self.update(assistant_id, update_req)
+                 resp["_rollback_fallback"] = True
+                 resp["_target_version"] = version_number
+                 if change_notes:
+                     resp["change_notes"] = change_notes
+                 return resp
+             except Exception:
+                 raise err
+
      def chat(
          self,
          assistant_id: str,
          request: AssistantChatRequest,
+         headers: Optional[Dict[str, str]] = None,
      ) -> Union[requests.Response, BaseModelResponse]:
          """Send a chat request to an assistant.

          Args:
              assistant_id: ID of the assistant to chat with
              request: Chat request details
+             headers: Optional additional HTTP headers (e.g., X-* for MCP propagation)

          Returns:
              Chat response or streaming response
          """
-         return self._api.post(
+         pydantic_schema = None
+         if (
+             request.output_schema is not None
+             and inspect.isclass(request.output_schema)
+             and issubclass(request.output_schema, BaseModel)
+         ):
+             pydantic_schema = deepcopy(request.output_schema)
+             request.output_schema = request.output_schema.model_json_schema()
+
+         response = self._api.post(
              f"/v1/assistants/{assistant_id}/model",
              BaseModelResponse,
              json_data=request.model_dump(exclude_none=True, by_alias=True),
              stream=request.stream,
+             extra_headers=headers,
          )
+         if not request.stream and pydantic_schema:
+             # we do conversion to the BaseModel here because self._parse_response don't see actual request model,
+             # where reflected desired output format for structured output
+             response.generated = pydantic_schema.model_validate(response.generated)
+
+         return response
+
+     def chat_with_version(
+         self,
+         assistant_id: str,
+         version_number: int,
+         request: AssistantChatRequest,
+     ) -> Union[requests.Response, BaseModelResponse]:
+         """Send a chat request to a specific assistant version.
+
+         Uses the stable chat endpoint with an explicit `version` parameter to
+         ensure compatibility with environments that don't expose
+         /versions/{version}/model.
+
+         Args:
+             assistant_id: ID of the assistant to chat with
+             version_number: version to pin chat to
+             request: Chat request details
+
+         Returns:
+             Chat response or streaming response
+         """
+         pydantic_schema = None
+         if issubclass(request.output_schema, BaseModel):
+             pydantic_schema = deepcopy(request.output_schema)
+             request.output_schema = request.output_schema.model_json_schema()
+
+         payload = request.model_dump(exclude_none=True, by_alias=True)
+         payload["version"] = version_number
+
+         response = self._api.post(
+             f"/v1/assistants/{assistant_id}/model",
+             BaseModelResponse,
+             json_data=payload,
+             stream=request.stream,
+         )
+         if not request.stream and pydantic_schema:
+             response.generated = pydantic_schema.model_validate(response.generated)
+
+         return response
+
+     def chat_by_slug(
+         self,
+         assistant_slug: str,
+         request: AssistantChatRequest,
+         headers: Optional[Dict[str, str]] = None,
+     ) -> Union[requests.Response, BaseModelResponse]:
+         """Send a chat request to an assistant by slug.
+
+         Args:
+             assistant_slug: Slug of the assistant to chat with
+             request: Chat request details
+             headers: Optional additional HTTP headers (e.g., X-* for MCP propagation)
+
+         Returns:
+             Chat response or streaming response
+         """
+         pydantic_schema = None
+         if (
+             request.output_schema is not None
+             and inspect.isclass(request.output_schema)
+             and issubclass(request.output_schema, BaseModel)
+         ):
+             pydantic_schema = deepcopy(request.output_schema)
+             request.output_schema = request.output_schema.model_json_schema()
+
+         response = self._api.post(
+             f"/v1/assistants/slug/{assistant_slug}/model",
+             BaseModelResponse,
+             json_data=request.model_dump(exclude_none=True, by_alias=True),
+             stream=request.stream,
+             extra_headers=headers,
+         )
+         if not request.stream and pydantic_schema:
+             response.generated = pydantic_schema.model_validate(response.generated)
+
+         return response

      def upload_file_to_chat(self, file_path: Path):
          """Upload a file to assistant chat and return the response containing file_url."""

          with open(file_path, "rb") as file:
              files = [
-                 ("file", (file_path.name, file, self._api.detect_mime_type(file_path)))
+                 (
+                     "file",
+                     (
+                         file_path.name,
+                         file,
+                         mimetypes.guess_type(file_path.name)[0]
+                         or "application/octet-stream",
+                     ),
+                 ),
              ]
              response = self._api.post_multipart("/v1/files/", dict, files=files)

@@ -213,3 +455,19 @@ class AssistantService:
              stream=True,
              json_data=request.model_dump(exclude_none=True),
          )
+
+     def evaluate(self, assistant_id: str, request: AssistantEvaluationRequest) -> dict:
+         """Evaluate an assistant with a dataset.
+
+         Args:
+             assistant_id: ID of the assistant to evaluate
+             request: Evaluation request details
+
+         Returns:
+             Evaluation results
+         """
+         return self._api.post(
+             f"/v1/assistants/{assistant_id}/evaluate",
+             dict,
+             json_data=request.model_dump(exclude_none=True),
+         )
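The structured-output path added to `chat` (and mirrored in `chat_with_version` and `chat_by_slug`) converts a pydantic `BaseModel` class passed as `output_schema` into a JSON schema before the request is sent, then validates the non-streaming response back into that class via `response.generated`. A minimal usage sketch follows; the service constructor arguments and the `text` field on `AssistantChatRequest` are assumptions not shown in this diff, while `output_schema`, `stream`, and `generated` come from the code above.

# Hypothetical usage sketch: only output_schema/stream/generated are confirmed by the diff;
# the service constructor and the "text" field are assumed for illustration.
from pydantic import BaseModel

from codemie_sdk.models.assistant import AssistantChatRequest
from codemie_sdk.services.assistant import AssistantService


class ReleaseSummary(BaseModel):
    title: str
    risk_level: str


service = AssistantService("https://codemie.example.com", token="<token>")  # assumed signature

request = AssistantChatRequest(
    text="Summarise the latest release notes",  # field name assumed
    stream=False,
    output_schema=ReleaseSummary,  # a BaseModel subclass is turned into a JSON schema internally
)

response = service.chat("assistant-id", request)
print(response.generated.title, response.generated.risk_level)  # parsed back into ReleaseSummary

Note that `chat` mutates `request.output_schema` in place (the class is replaced by its JSON schema), so a fresh request object is needed to reuse the same pydantic class with `chat_with_version` or `chat_by_slug`.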
@@ -0,0 +1,90 @@
+ """Conversation service implementation."""
+
+ from typing import List
+
+ from ..models.conversation import (
+     Conversation,
+     ConversationDetails,
+     ConversationCreateRequest,
+ )
+ from ..utils import ApiRequestHandler
+
+
+ class ConversationService:
+     """Service for managing user conversations."""
+
+     def __init__(self, api_domain: str, token: str, verify_ssl: bool = True):
+         """Initialize the conversation service.
+
+         Args:
+             api_domain: Base URL for the API
+             token: Authentication token
+             verify_ssl: Whether to verify SSL certificates
+         """
+         self._api = ApiRequestHandler(api_domain, token, verify_ssl)
+
+     def list(self) -> List[Conversation]:
+         """Get list of all conversations for the current user.
+
+         Returns:
+             List of all conversations for the current user.
+         """
+         return self._api.get("/v1/conversations", List[Conversation])
+
+     def list_by_assistant_id(self, assistant_id: str) -> List[Conversation]:
+         """Get list of all conversations for the current user that include the specified assistant.
+
+         Args:
+             assistant_id: Assistant ID
+
+         Returns:
+             List of conversations for the specified assistant.
+         """
+         return [
+             conv
+             for conv in self._api.get("/v1/conversations", List[Conversation])
+             if assistant_id in conv.assistant_ids
+         ]
+
+     def get_conversation(self, conversation_id: str) -> ConversationDetails:
+         """Get details for a specific conversation by its ID.
+
+         Args:
+             conversation_id: Conversation ID
+
+         Returns:
+             Conversation details
+         """
+         return self._api.get(
+             f"/v1/conversations/{conversation_id}",
+             ConversationDetails,
+         )
+
+     def create(self, request: ConversationCreateRequest) -> dict:
+         """Create a new conversation.
+
+         Args:
+             request: Conversation creation request
+
+         Returns:
+             Created conversation details
+         """
+         return self._api.post(
+             "/v1/conversations",
+             dict,
+             json_data=request.model_dump(exclude_none=True),
+         )
+
+     def delete(self, conversation_id: str) -> dict:
+         """Delete a specific conversation by its ID.
+
+         Args:
+             conversation_id: Conversation ID to delete
+
+         Returns:
+             Deletion confirmation
+         """
+         return self._api.delete(
+             f"/v1/conversations/{conversation_id}",
+             dict,
+         )
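One design note on the new ConversationService: `list_by_assistant_id` filters client-side, fetching every conversation for the current user and keeping those whose `assistant_ids` contain the given assistant, rather than passing a filter to the backend. A short sketch using the constructor shown above (domain, token, and IDs are placeholders):

from codemie_sdk.services.conversation import ConversationService

service = ConversationService("https://codemie.example.com", token="<token>")

all_conversations = service.list()                            # every conversation for the current user
for_assistant = service.list_by_assistant_id("assistant-id")  # client-side filtered subset

details = service.get_conversation("conversation-id")
service.delete("conversation-id")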
@@ -1,6 +1,7 @@
  """DataSource service implementation."""

  import json
+ import mimetypes
  from typing import Literal, List, Union, Tuple, Optional, Dict, Any
  from pathlib import Path

@@ -14,7 +15,11 @@ from ..models.datasource import (
      UpdateCodeDataSourceRequest,
      BaseUpdateDataSourceRequest,
      FileDataSourceRequest,
+     CodeAnalysisDataSourceRequest,
+     CodeExplorationDataSourceRequest,
+     ElasticsearchStatsResponse,
  )
+ from ..models.assistant import AssistantListResponse
  from ..utils import ApiRequestHandler


@@ -82,7 +87,10 @@ class DatasourceService:
                      content = f.read()

                  # Basic MIME type detection
-                 mime_type = self._api.detect_mime_type(file_path)
+                 mime_type = (
+                     mimetypes.guess_type(file_path.name)[0]
+                     or "application/octet-stream"
+                 )
                  file_uploads.append(("files", (file_path.name, content, mime_type)))

              elif isinstance(file_item, tuple) and len(file_item) == 3:
@@ -161,16 +169,20 @@ class DatasourceService:
              params["sort_key"] = sort_key
              params["sort_order"] = sort_order

+         unified_filters = {}
          if datasource_types:
-             params["index_type"] = datasource_types
+             unified_filters["index_type"] = datasource_types
          if projects:
-             params["project"] = projects
+             unified_filters["project"] = projects
          if status:
-             params["status"] = status.value
+             unified_filters["status"] = status.value
          if owner:
-             params["created_by"] = owner
+             unified_filters["created_by"] = owner
          if filters:
-             params["filters"] = json.dumps(filters)
+             unified_filters.update(filters)
+         if unified_filters:
+             params["filters"] = json.dumps(unified_filters)
+
          return self._api.get("/v1/index", List[DataSource], params=params)

      def get(self, datasource_id: str) -> DataSource:
@@ -198,3 +210,66 @@ class DatasourceService:
              Deletion confirmation
          """
          return self._api.delete(f"/v1/index/{datasource_id}", dict)
+
+     def get_assistants_using_datasource(
+         self, datasource_id: str
+     ) -> List[AssistantListResponse]:
+         """Get list of assistants that are using this datasource.
+
+         Args:
+             datasource_id: ID of the datasource
+
+         Returns:
+             List of AssistantListResponse objects containing assistants using this datasource
+
+         Raises:
+             ApiError: If the datasource is not found or other API errors occur.
+         """
+         return self._api.get(
+             f"/v1/index/{datasource_id}/assistants", List[AssistantListResponse]
+         )
+
+     def create_provider_datasource(
+         self,
+         toolkit_id: str,
+         provider_name: str,
+         request: Union[CodeAnalysisDataSourceRequest, CodeExplorationDataSourceRequest],
+     ) -> dict:
+         """Create a provider-based datasource.
+
+         Args:
+             toolkit_id: ID of the toolkit to use
+             provider_name: Name of the provider
+             request: Provider datasource creation request (CodeAnalysisDataSourceRequest or CodeExplorationDataSourceRequest)
+
+         Returns:
+             dict: Response from the server containing operation status
+         """
+         endpoint = (
+             f"/v1/index/provider?toolkit_id={toolkit_id}&provider_name={provider_name}"
+         )
+
+         return self._api.post(
+             endpoint,
+             dict,
+             json_data=request.model_dump(by_alias=True, exclude_none=True),
+         )
+
+     def get_elasticsearch_stats(self, datasource_id: str) -> ElasticsearchStatsResponse:
+         """Get Elasticsearch statistics for a specific datasource index.
+
+         Args:
+             datasource_id: ID of the datasource
+
+         Returns:
+             ElasticsearchStatsResponse with Elasticsearch statistics including:
+             - index_name: Name of the index in Elasticsearch
+             - size_in_bytes: Size of the index in bytes
+
+         Raises:
+             ApiError: If the datasource is not found, platform datasources are not supported,
+                 or Elasticsearch statistics are not available.
+         """
+         return self._api.get(
+             f"/v1/index/{datasource_id}/elasticsearch", ElasticsearchStatsResponse
+         )
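The listing change above folds `index_type`, `project`, `status`, `created_by`, and any caller-supplied filters into a single JSON-encoded `filters` query parameter instead of sending them as separate parameters. The new read endpoints can be exercised as in the sketch below, which assumes `DatasourceService` is constructed with the same arguments (`api_domain`, `token`) as the other services in this diff:

from codemie_sdk.services.datasource import DatasourceService

service = DatasourceService("https://codemie.example.com", token="<token>")  # assumed signature

# Assistants referencing a datasource, and Elasticsearch stats for its index.
assistants = service.get_assistants_using_datasource("datasource-id")
stats = service.get_elasticsearch_stats("datasource-id")
print(stats.index_name, stats.size_in_bytes)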
@@ -0,0 +1,82 @@
+ """File operation service implementation."""
+
+ import mimetypes
+ from pathlib import Path
+ from typing import List
+
+ from ..models.file_operation import FileBulkCreateResponse
+ from ..utils import ApiRequestHandler
+
+
+ class FileOperationService:
+     """Service for managing file operations."""
+
+     def __init__(self, api_domain: str, token: str, verify_ssl: bool = True):
+         """Initialize the file operation service.
+
+         Args:
+             api_domain: Base URL for the CodeMie API
+             token: Authentication token
+             verify_ssl: Whether to verify SSL certificates
+         """
+         self._api = ApiRequestHandler(api_domain, token, verify_ssl)
+
+     def bulk_upload(self, files: List[Path]) -> FileBulkCreateResponse:
+         """Upload multiple files in a single operation.
+
+         Args:
+             files: List of File paths (required)
+
+         Returns:
+             FileBulkCreateResponse: Results of the bulk operation including
+
+         Raises:
+             ValueError: If items in files is not a Path
+             ApiError: If bulk creation fails or validation errors occur
+         """
+
+         files_to_upload = []
+
+         for file_path in files:
+             if isinstance(file_path, Path):
+                 # File path provided - read file and detect MIME type
+                 with open(file_path, "rb") as file:
+                     content = file.read()
+
+                 # Basic MIME type detection
+                 mime_type = (
+                     mimetypes.guess_type(file_path.name)[0]
+                     or "application/octet-stream"
+                 )
+                 files_to_upload.append(("files", (file_path.name, content, mime_type)))
+             else:
+                 raise ValueError("Each item in list must be a Path")
+
+         response = self._api.post_multipart(
+             "/v1/files/bulk",
+             files=files_to_upload,
+             response_model=FileBulkCreateResponse,
+         )
+
+         return response
+
+     def get_file(self, file_id: str) -> bytes:
+         """Get a file by its ID.
+
+         Args:
+             file_id: The file identifier (base64 encoded ID from file_url)
+
+         Returns:
+             bytes: The file content as binary data
+
+         Raises:
+             ApiError: If the file doesn't exist or there's an API error
+         """
+         import requests
+
+         response = self._api.get(
+             f"/v1/files/{file_id}",
+             response_model=requests.Response,
+             wrap_response=False,
+         )
+         return response.content
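A short sketch of the new file operations, using the constructor shown above (domain, token, paths, and the file ID are placeholders). `bulk_upload` reads each file, guesses its MIME type with `mimetypes` (falling back to `application/octet-stream`), and posts everything to `/v1/files/bulk` in one multipart request; `get_file` returns the raw bytes:

from pathlib import Path

from codemie_sdk.services.files import FileOperationService

service = FileOperationService("https://codemie.example.com", token="<token>")

result = service.bulk_upload([Path("report.pdf"), Path("notes.md")])

# Retrieve a previously uploaded file as raw bytes (the ID comes from its file_url).
content = service.get_file("base64-encoded-file-id")
Path("downloaded.pdf").write_bytes(content)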
@@ -5,7 +5,11 @@ from typing import List, Optional, Dict, Any

  from ..exceptions import NotFoundError
  from ..models.common import PaginationParams
- from ..models.integration import Integration, IntegrationType
+ from ..models.integration import (
+     Integration,
+     IntegrationType,
+     IntegrationTestRequest,
+ )
  from ..utils import ApiRequestHandler


@@ -150,3 +154,19 @@ class IntegrationService:
          return self._api.delete(
              f"{self._get_base_path(setting_type)}/{setting_id}", dict
          )
+
+     def test(self, integration: IntegrationTestRequest, response_type: Any) -> Any:
+         """Test an integration.
+
+         Args:
+             integration: IntegrationTestRequest - integration to test
+             response_type: Type of response expected
+
+         Returns:
+             Test integration response
+         """
+         return self._api.post(
+             "/v1/settings/test/",
+             response_model=response_type,
+             json_data=integration.model_dump(exclude_none=True),
+         )