codemie-sdk-python 0.1.52__py3-none-any.whl → 0.1.258__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of codemie-sdk-python might be problematic. See the registry's advisory page for this release for more details.

Files changed (35)
  1. codemie_sdk/__init__.py +114 -2
  2. codemie_sdk/auth/credentials.py +5 -4
  3. codemie_sdk/client/client.py +66 -5
  4. codemie_sdk/models/__init__.py +0 -0
  5. codemie_sdk/models/assistant.py +137 -12
  6. codemie_sdk/models/conversation.py +169 -0
  7. codemie_sdk/models/datasource.py +81 -1
  8. codemie_sdk/models/file_operation.py +25 -0
  9. codemie_sdk/models/integration.py +23 -2
  10. codemie_sdk/models/vendor_assistant.py +187 -0
  11. codemie_sdk/models/vendor_guardrail.py +152 -0
  12. codemie_sdk/models/vendor_knowledgebase.py +151 -0
  13. codemie_sdk/models/vendor_workflow.py +145 -0
  14. codemie_sdk/models/workflow.py +4 -4
  15. codemie_sdk/models/workflow_execution_payload.py +21 -0
  16. codemie_sdk/models/workflow_state.py +6 -3
  17. codemie_sdk/models/workflow_thoughts.py +26 -0
  18. codemie_sdk/services/assistant.py +261 -3
  19. codemie_sdk/services/conversation.py +90 -0
  20. codemie_sdk/services/datasource.py +81 -6
  21. codemie_sdk/services/files.py +82 -0
  22. codemie_sdk/services/integration.py +21 -1
  23. codemie_sdk/services/vendor_assistant.py +364 -0
  24. codemie_sdk/services/vendor_guardrail.py +375 -0
  25. codemie_sdk/services/vendor_knowledgebase.py +270 -0
  26. codemie_sdk/services/vendor_workflow.py +330 -0
  27. codemie_sdk/services/webhook.py +41 -0
  28. codemie_sdk/services/workflow.py +26 -2
  29. codemie_sdk/services/workflow_execution.py +54 -6
  30. codemie_sdk/utils/http.py +43 -35
  31. codemie_sdk_python-0.1.258.dist-info/METADATA +1404 -0
  32. codemie_sdk_python-0.1.258.dist-info/RECORD +45 -0
  33. codemie_sdk_python-0.1.52.dist-info/METADATA +0 -809
  34. codemie_sdk_python-0.1.52.dist-info/RECORD +0 -29
  35. {codemie_sdk_python-0.1.52.dist-info → codemie_sdk_python-0.1.258.dist-info}/WHEEL +0 -0
codemie_sdk/__init__.py CHANGED
@@ -18,6 +18,118 @@ Basic usage:
18
18
  """
19
19
 
20
20
  from .client.client import CodeMieClient
21
+ from .models.vendor_assistant import (
22
+ VendorType,
23
+ VendorAssistantSetting,
24
+ VendorAssistantSettingsResponse,
25
+ VendorAssistant,
26
+ VendorAssistantVersion,
27
+ VendorAssistantStatus,
28
+ VendorAssistantsResponse,
29
+ VendorAssistantAlias,
30
+ VendorAssistantAliasesResponse,
31
+ VendorAssistantInstallRequest,
32
+ VendorAssistantInstallSummary,
33
+ VendorAssistantInstallResponse,
34
+ VendorAssistantUninstallResponse,
35
+ PaginationInfo,
36
+ TokenPagination,
37
+ )
38
+ from .models.vendor_workflow import (
39
+ VendorWorkflowSetting,
40
+ VendorWorkflowSettingsResponse,
41
+ VendorWorkflow,
42
+ VendorWorkflowStatus,
43
+ VendorWorkflowsResponse,
44
+ VendorWorkflowAlias,
45
+ VendorWorkflowAliasesResponse,
46
+ VendorWorkflowInstallRequest,
47
+ VendorWorkflowInstallSummary,
48
+ VendorWorkflowInstallResponse,
49
+ VendorWorkflowUninstallResponse,
50
+ )
51
+ from .models.vendor_knowledgebase import (
52
+ VendorKnowledgeBaseSetting,
53
+ VendorKnowledgeBaseSettingsResponse,
54
+ VendorKnowledgeBase,
55
+ VendorKnowledgeBaseStatus,
56
+ VendorKnowledgeBasesResponse,
57
+ VendorKnowledgeBaseDetail,
58
+ VendorKnowledgeBaseInstallRequest,
59
+ VendorKnowledgeBaseInstallSummary,
60
+ VendorKnowledgeBaseInstallResponse,
61
+ VendorKnowledgeBaseUninstallResponse,
62
+ )
63
+ from .models.vendor_guardrail import (
64
+ VendorGuardrailSetting,
65
+ VendorGuardrailSettingsResponse,
66
+ VendorGuardrail,
67
+ VendorGuardrailStatus,
68
+ VendorGuardrailsResponse,
69
+ VendorGuardrailVersion,
70
+ VendorGuardrailVersionsResponse,
71
+ VendorGuardrailInstallRequest,
72
+ VendorGuardrailInstallSummary,
73
+ VendorGuardrailInstallResponse,
74
+ VendorGuardrailUninstallResponse,
75
+ )
76
+ from .services.vendor_assistant import VendorAssistantService
77
+ from .services.vendor_workflow import VendorWorkflowService
78
+ from .services.vendor_knowledgebase import VendorKnowledgeBaseService
79
+ from .services.vendor_guardrail import VendorGuardrailService
21
80
 
22
- __version__ = "0.1.19"
23
- __all__ = ["CodeMieClient"]
81
+ __version__ = "0.2.12"
82
+ __all__ = [
83
+ "CodeMieClient",
84
+ "VendorType",
85
+ "VendorAssistantSetting",
86
+ "VendorAssistantSettingsResponse",
87
+ "VendorAssistant",
88
+ "VendorAssistantVersion",
89
+ "VendorAssistantStatus",
90
+ "VendorAssistantsResponse",
91
+ "VendorAssistantAlias",
92
+ "VendorAssistantAliasesResponse",
93
+ "VendorAssistantInstallRequest",
94
+ "VendorAssistantInstallSummary",
95
+ "VendorAssistantInstallResponse",
96
+ "VendorAssistantUninstallResponse",
97
+ "PaginationInfo",
98
+ "TokenPagination",
99
+ "VendorAssistantService",
100
+ "VendorWorkflowSetting",
101
+ "VendorWorkflowSettingsResponse",
102
+ "VendorWorkflow",
103
+ "VendorWorkflowStatus",
104
+ "VendorWorkflowsResponse",
105
+ "VendorWorkflowAlias",
106
+ "VendorWorkflowAliasesResponse",
107
+ "VendorWorkflowInstallRequest",
108
+ "VendorWorkflowInstallSummary",
109
+ "VendorWorkflowInstallResponse",
110
+ "VendorWorkflowUninstallResponse",
111
+ "VendorWorkflowService",
112
+ "VendorKnowledgeBaseSetting",
113
+ "VendorKnowledgeBaseSettingsResponse",
114
+ "VendorKnowledgeBase",
115
+ "VendorKnowledgeBaseStatus",
116
+ "VendorKnowledgeBasesResponse",
117
+ "VendorKnowledgeBaseDetail",
118
+ "VendorKnowledgeBaseInstallRequest",
119
+ "VendorKnowledgeBaseInstallSummary",
120
+ "VendorKnowledgeBaseInstallResponse",
121
+ "VendorKnowledgeBaseUninstallResponse",
122
+ "VendorKnowledgeBaseService",
123
+ "VendorGuardrailSetting",
124
+ "VendorGuardrailSettingsResponse",
125
+ "VendorGuardrail",
126
+ "VendorGuardrailStatus",
127
+ "VendorGuardrailsResponse",
128
+ "VendorGuardrailVersion",
129
+ "VendorGuardrailVersionsResponse",
130
+ "VendorGuardrailInstallRequest",
131
+ "VendorGuardrailInstallSummary",
132
+ "VendorGuardrailInstallResponse",
133
+ "VendorGuardrailUninstallResponse",
134
+ "VendorGuardrailService",
135
+ ]
@@ -36,14 +36,15 @@ class KeycloakCredentials:
36
36
  self.password = password
37
37
  self.verify_ssl = verify_ssl
38
38
 
39
- if not ((client_id and client_secret) or (username and password)):
39
+ def get_token(self) -> str:
40
+ """Get access token using either client credentials or password grant."""
41
+ if not (
42
+ (self.client_id and self.client_secret) or (self.username and self.password)
43
+ ):
40
44
  raise ValueError(
41
45
  "Either client credentials (client_id, client_secret) or "
42
46
  "user credentials (username, password) must be provided"
43
47
  )
44
-
45
- def get_token(self) -> str:
46
- """Get access token using either client credentials or password grant."""
47
48
  url = (
48
49
  f"{self.server_url}/realms/{self.realm_name}/protocol/openid-connect/token"
49
50
  )
@@ -4,12 +4,19 @@ from typing import Optional
4
4
 
5
5
  from ..auth.credentials import KeycloakCredentials
6
6
  from ..services.assistant import AssistantService
7
+ from ..services.conversation import ConversationService
7
8
  from ..services.datasource import DatasourceService
8
9
  from ..services.llm import LLMService
9
10
  from ..services.integration import IntegrationService
10
11
  from ..services.task import TaskService
11
12
  from ..services.user import UserService
12
13
  from ..services.workflow import WorkflowService
14
+ from ..services.files import FileOperationService
15
+ from ..services.webhook import WebhookService
16
+ from ..services.vendor_assistant import VendorAssistantService
17
+ from ..services.vendor_workflow import VendorWorkflowService
18
+ from ..services.vendor_knowledgebase import VendorKnowledgeBaseService
19
+ from ..services.vendor_guardrail import VendorGuardrailService
13
20
 
14
21
 
15
22
  class CodeMieClient:
@@ -50,6 +57,7 @@ class CodeMieClient:
50
57
 
51
58
  self._token: Optional[str] = None
52
59
  self._api_domain = codemie_api_domain.rstrip("/")
60
+ self._is_localhost = self._is_localhost_domain(self._api_domain)
53
61
  self._verify_ssl = verify_ssl
54
62
  if not verify_ssl:
55
63
  import requests
@@ -57,8 +65,8 @@ class CodeMieClient:
57
65
 
58
66
  requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
59
67
 
60
- # Initialize token first
61
- self._token = self.auth.get_token()
68
+ # Initialize token
69
+ self._token = "" if self._is_localhost else self.auth.get_token()
62
70
 
63
71
  # Initialize services with verify_ssl parameter and token
64
72
  self.assistants = AssistantService(
@@ -76,17 +84,49 @@ class CodeMieClient:
76
84
  self.workflows = WorkflowService(
77
85
  self._api_domain, self._token, verify_ssl=self._verify_ssl
78
86
  )
87
+ self.conversations = ConversationService(
88
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
89
+ )
90
+ self.files = FileOperationService(
91
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
92
+ )
93
+ self.webhook = WebhookService(
94
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
95
+ )
96
+ self.vendor_assistants = VendorAssistantService(
97
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
98
+ )
99
+ self.vendor_workflows = VendorWorkflowService(
100
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
101
+ )
102
+ self.vendor_knowledgebases = VendorKnowledgeBaseService(
103
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
104
+ )
105
+ self.vendor_guardrails = VendorGuardrailService(
106
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
107
+ )
79
108
 
80
109
  @property
81
110
  def token(self) -> str:
82
111
  """Get current token or fetch new one if not available."""
83
- if not self._token:
84
- self._token = self.auth.get_token()
112
+ self._token = "" if self._is_localhost else self.auth.get_token()
85
113
  return self._token
86
114
 
115
+ @staticmethod
116
+ def _is_localhost_domain(domain: str) -> bool:
117
+ """Check if the domain is a localhost variant."""
118
+ domain_lower = domain.lower()
119
+ localhost_patterns = [
120
+ "localhost",
121
+ "127.0.0.1",
122
+ "0.0.0.0",
123
+ "192.168",
124
+ ]
125
+ return any(pattern in domain_lower for pattern in localhost_patterns)
126
+
87
127
  def refresh_token(self) -> str:
88
128
  """Force token refresh."""
89
- self._token = self.auth.get_token()
129
+ self._token = "" if self._is_localhost else self.auth.get_token()
90
130
  # Update token in services
91
131
  self.assistants = AssistantService(
92
132
  self._api_domain, self._token, verify_ssl=self._verify_ssl
@@ -109,4 +149,25 @@ class CodeMieClient:
109
149
  self.workflows = WorkflowService(
110
150
  self._api_domain, self._token, verify_ssl=self._verify_ssl
111
151
  )
152
+ self.conversations = ConversationService(
153
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
154
+ )
155
+ self.files = FileOperationService(
156
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
157
+ )
158
+ self.webhook = WebhookService(
159
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
160
+ )
161
+ self.vendor_assistants = VendorAssistantService(
162
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
163
+ )
164
+ self.vendor_workflows = VendorWorkflowService(
165
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
166
+ )
167
+ self.vendor_knowledgebases = VendorKnowledgeBaseService(
168
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
169
+ )
170
+ self.vendor_guardrails = VendorGuardrailService(
171
+ self._api_domain, self._token, verify_ssl=self._verify_ssl
172
+ )
112
173
  return self._token
File without changes
@@ -3,7 +3,7 @@
3
3
  import uuid
4
4
  from datetime import datetime
5
5
  from enum import Enum
6
- from typing import List, Optional, Any, Union, Dict
6
+ from typing import List, Optional, Any, Union, Dict, Type
7
7
 
8
8
  from pydantic import BaseModel, Field, ConfigDict, model_validator
9
9
 
@@ -53,13 +53,58 @@ class Context(BaseModel):
53
53
  name: str
54
54
 
55
55
 
56
- class MCPServerConfig(BaseModel):
56
+ class PromptVariable(BaseModel):
57
+ """Model for assistant prompt variables."""
58
+
57
59
  model_config = ConfigDict(extra="ignore")
58
60
 
59
- command: str
60
- args: Optional[list[str]]
61
- env: Optional[dict[str, Any]] = None
62
- auth_token: Optional[str] = None
61
+ key: str
62
+ description: Optional[str] = None
63
+ default_value: str
64
+
65
+
66
+ class MCPServerConfig(BaseModel):
67
+ """
68
+ Configuration for an MCP server.
69
+
70
+ Defines how to start and connect to an MCP server instance, including
71
+ command, arguments, environment variables, and authentication parameters.
72
+
73
+ Attributes:
74
+ command (str): The command used to invoke the MCP server
75
+ args (Optional[list[str]]): List of arguments for the server command
76
+ env (Optional[dict[str, Any]]): Environment variables for the server process
77
+ auth_token (Optional[str]): Authentication token for MCP-Connect server
78
+ """
79
+
80
+ command: Optional[str] = Field(
81
+ None,
82
+ description="The command used to invoke the MCP server (e.g., 'npx', 'uvx') using a stdio transport",
83
+ )
84
+ url: Optional[str] = Field(
85
+ None,
86
+ description="The HTTP URL of a remote MCP server (use when connecting over HTTP/streamable-http).",
87
+ )
88
+ args: Optional[list[str]] = Field(
89
+ default_factory=list,
90
+ description="List of arguments to pass to the MCP server command",
91
+ )
92
+ headers: Optional[dict[str, str]] = Field(
93
+ default_factory=dict,
94
+ description="HTTP headers to include when connecting to an MCP server via `url`.",
95
+ )
96
+ env: Optional[dict[str, Any]] = Field(
97
+ default_factory=dict,
98
+ description="Environment variables to be set for the MCP server process",
99
+ )
100
+ type: Optional[str] = Field(
101
+ None,
102
+ description="Transport type. Set to 'streamable-http' to use a streamable HTTP transport; "
103
+ "leave null for stdio/sse command transports.",
104
+ )
105
+ auth_token: Optional[str] = Field(
106
+ None, description="Authentication token for the MCP-Connect server"
107
+ )
63
108
 
64
109
 
65
110
  class MCPServerDetails(BaseModel):
@@ -102,6 +147,17 @@ class AssistantBase(BaseModel):
102
147
  icon_url: Optional[str] = None
103
148
 
104
149
 
150
+ class AssistantListResponse(BaseModel):
151
+ """Model for assistant list response."""
152
+
153
+ model_config = ConfigDict(extra="ignore")
154
+
155
+ id: str
156
+ name: str
157
+ slug: Optional[str] = None
158
+ created_by: Optional[User] = None
159
+
160
+
105
161
  class Assistant(AssistantBase):
106
162
  """Full assistant model with additional fields."""
107
163
 
@@ -112,7 +168,10 @@ class Assistant(AssistantBase):
112
168
  project: str
113
169
  llm_model_type: Optional[str] = None
114
170
  toolkits: List[ToolKitDetails] = Field(default_factory=list)
115
- user_prompts: List[str] = Field(default_factory=list)
171
+ conversation_starters: List[str] = Field(
172
+ default_factory=list,
173
+ description="List of suggested conversation starter prompts",
174
+ )
116
175
  shared: bool = False
117
176
  is_react: bool = False
118
177
  is_global: bool = False
@@ -126,6 +185,8 @@ class Assistant(AssistantBase):
126
185
  user_abilities: Optional[List[Any]] = None
127
186
  mcp_servers: List[MCPServerDetails] = Field(default_factory=list)
128
187
  assistant_ids: List[str] = Field(default_factory=list)
188
+ version_count: Optional[int] = None
189
+ prompt_variables: Optional[List[PromptVariable]] = Field(default=None)
129
190
 
130
191
 
131
192
  class AssistantRequestBase(AssistantBase):
@@ -141,7 +202,10 @@ class AssistantRequestBase(AssistantBase):
141
202
  context: List[Context] = Field(default_factory=list)
142
203
  llm_model_type: str
143
204
  toolkits: List[ToolKitDetails] = Field(default_factory=list)
144
- user_prompts: List[str] = Field(default_factory=list)
205
+ conversation_starters: List[str] = Field(
206
+ default_factory=list,
207
+ description="List of suggested conversation starter prompts",
208
+ )
145
209
  shared: bool = False
146
210
  is_react: bool = False
147
211
  is_global: Optional[bool] = False
@@ -150,6 +214,7 @@ class AssistantRequestBase(AssistantBase):
150
214
  top_p: Optional[float] = None
151
215
  mcp_servers: List[MCPServerDetails] = Field(default_factory=list)
152
216
  assistant_ids: List[str] = Field(default_factory=list)
217
+ prompt_variables: List[PromptVariable] = Field(default_factory=list)
153
218
 
154
219
 
155
220
  class AssistantCreateRequest(AssistantRequestBase):
@@ -164,6 +229,27 @@ class AssistantUpdateRequest(AssistantRequestBase):
164
229
  pass
165
230
 
166
231
 
232
+ class AssistantVersion(BaseModel):
233
+ """Immutable snapshot of assistant configuration for a specific version."""
234
+
235
+ model_config = ConfigDict(extra="ignore", use_enum_values=True)
236
+
237
+ version_number: int
238
+ created_date: datetime
239
+ created_by: Optional[User] = None
240
+ change_notes: Optional[str] = None
241
+ description: Optional[str] = None
242
+ system_prompt: str
243
+ llm_model_type: Optional[str] = None
244
+ temperature: Optional[float] = None
245
+ top_p: Optional[float] = None
246
+ context: List[Context] = Field(default_factory=list)
247
+ toolkits: List[ToolKitDetails] = Field(default_factory=list)
248
+ mcp_servers: List[MCPServerDetails] = Field(default_factory=list)
249
+ assistant_ids: List[str] = Field(default_factory=list)
250
+ prompt_variables: List[PromptVariable] = Field(default_factory=list)
251
+
252
+
167
253
  class ChatRole(str, Enum):
168
254
  """Enum for chat message roles."""
169
255
 
@@ -204,11 +290,13 @@ class AssistantChatRequest(BaseModel):
204
290
  """Model for chat request to assistant."""
205
291
 
206
292
  conversation_id: Optional[str] = Field(
207
- default=str(uuid.uuid4()), description="Conversation identifier"
293
+ default_factory=lambda: str(uuid.uuid4()), description="Conversation identifier"
208
294
  )
209
295
  text: str = Field(description="User's input")
210
296
  content_raw: Optional[str] = Field(default="", description="Raw content input")
211
- file_name: Optional[str] = Field(default=None, description="Associated file name")
297
+ file_names: List[str] = Field(
298
+ default_factory=list, description="List of file names"
299
+ )
212
300
  llm_model: Optional[str] = Field(
213
301
  default=None, description="Specific LLM model to use"
214
302
  )
@@ -217,9 +305,17 @@ class AssistantChatRequest(BaseModel):
217
305
  description="Conversation history as list of messages or string",
218
306
  )
219
307
  history_index: int = Field(
220
- default=0, description="DataSource in conversation history"
308
+ default=None, description="DataSource in conversation history"
221
309
  )
222
310
  stream: bool = Field(default=False, description="Enable streaming response")
311
+ propagate_headers: bool = Field(
312
+ default=False,
313
+ description="Enable propagation of X-* HTTP headers to MCP servers during tool execution",
314
+ )
315
+ custom_metadata: Optional[dict[str, Any]] = Field(
316
+ default=None,
317
+ description="Custom metadata for the AI Assistant",
318
+ )
223
319
  top_k: int = Field(default=10, description="Top K results to consider")
224
320
  system_prompt: str = Field(default="", description="Override system prompt")
225
321
  background_task: bool = Field(default=False, description="Run as background task")
@@ -227,12 +323,26 @@ class AssistantChatRequest(BaseModel):
227
323
  default=None, description="Provide additional metadata"
228
324
  )
229
325
  tools_config: Optional[List[ToolConfig]] = None
326
+ output_schema: Optional[dict | Type[BaseModel]] = Field(
327
+ default=None,
328
+ description="Structured output schema for the agent. \
329
+ If specified, `generated` field in response will have the same type",
330
+ )
331
+ mcp_server_single_usage: Optional[bool] = Field(
332
+ default=None,
333
+ description="Override conversation-level MCP server lifecycle setting for this request. \
334
+ When true, MCP servers are created fresh and destroyed after use. \
335
+ When false, MCP servers are cached and reused. \
336
+ If not specified, uses conversation's default setting.",
337
+ )
230
338
 
231
339
 
232
340
  class BaseModelResponse(BaseModel):
233
341
  """Model for chat response from assistant."""
234
342
 
235
- generated: str = Field(description="Generated response error_message")
343
+ generated: str | dict | BaseModel = Field(
344
+ description="Generated response. If output_schema in request is specified, corresponds with its type"
345
+ )
236
346
  time_elapsed: Optional[float] = Field(
237
347
  default=None, alias="timeElapsed", description="Time taken for generation"
238
348
  )
@@ -263,3 +373,18 @@ class EnvVars(BaseModel):
263
373
 
264
374
  class ExportAssistantPayload(BaseModel):
265
375
  env_vars: Optional[EnvVars] = None
376
+
377
+
378
+ class AssistantEvaluationRequest(BaseModel):
379
+ """Model for assistant evaluation request."""
380
+
381
+ model_config = ConfigDict(extra="ignore")
382
+
383
+ dataset_id: str = Field(description="ID of the dataset to use for evaluation")
384
+ experiment_name: str = Field(description="Name of the evaluation experiment")
385
+ system_prompt: Optional[str] = Field(
386
+ default=None, description="System prompt to use for evaluation"
387
+ )
388
+ llm_model: Optional[str] = Field(
389
+ default=None, description="LLM model to use for evaluation"
390
+ )
@@ -0,0 +1,169 @@
1
+ """Models for conversation-related data structures."""
2
+
3
+ from datetime import datetime
4
+ from typing import List, Optional, Union
5
+
6
+ from pydantic import BaseModel, Field
7
+
8
+ from codemie_sdk.models.assistant import ContextType
9
+
10
+
11
+ class Conversation(BaseModel):
12
+ """
13
+ Model for conversation summary data as returned from the list endpoint.
14
+ """
15
+
16
+ id: str
17
+ name: str
18
+ folder: Optional[str]
19
+ pinned: bool
20
+ date: str
21
+ assistant_ids: List[str]
22
+ initial_assistant_id: Optional[str]
23
+
24
+
25
+ class Mark(BaseModel):
26
+ """Model for conversation review/mark data."""
27
+
28
+ mark: str
29
+ rating: int
30
+ comments: str
31
+ date: datetime
32
+ operator: Optional["Operator"] = None
33
+
34
+
35
+ class Operator(BaseModel):
36
+ """Represents an operator involved in marking a conversation."""
37
+
38
+ user_id: str
39
+ name: str
40
+
41
+
42
+ class Thought(BaseModel):
43
+ """Model for reasoning or tool-invocation within a message's history."""
44
+
45
+ id: str
46
+ parent_id: Optional[str]
47
+ metadata: dict
48
+ in_progress: bool
49
+ input_text: Optional[str]
50
+ message: Optional[str]
51
+ author_type: str
52
+ author_name: str
53
+ output_format: str
54
+ error: Optional[bool]
55
+ children: List[str]
56
+
57
+
58
+ class HistoryMark(BaseModel):
59
+ """Model for conversation history review/mark data."""
60
+
61
+ mark: str
62
+ rating: int
63
+ comments: Optional[str]
64
+ date: datetime
65
+
66
+
67
+ class HistoryItem(BaseModel):
68
+ """Represents an individual message within a conversation's history."""
69
+
70
+ role: str
71
+ message: str
72
+ historyIndex: int
73
+ date: datetime
74
+ responseTime: Optional[float]
75
+ inputTokens: Optional[int]
76
+ outputTokens: Optional[int]
77
+ moneySpent: Optional[float]
78
+ userMark: Optional[HistoryMark]
79
+ operatorMark: Optional[HistoryMark]
80
+ messageRaw: Optional[str]
81
+ fileNames: List[str]
82
+ assistantId: Optional[str]
83
+ thoughts: Optional[List[Thought]] = Field(default_factory=list)
84
+
85
+
86
+ class ContextItem(BaseModel):
87
+ """Represents contextual settings for conversation."""
88
+
89
+ context_type: Optional[ContextType]
90
+ name: str
91
+
92
+
93
+ class ToolItem(BaseModel):
94
+ """Represents a tool used by an assistant, including configuration and description."""
95
+
96
+ name: str
97
+ label: Optional[str]
98
+ settings_config: Optional[bool]
99
+ user_description: Optional[str]
100
+
101
+
102
+ class AssistantDataItem(BaseModel):
103
+ """Model represents details for an assistant included in a conversation."""
104
+
105
+ assistant_id: str
106
+ assistant_name: str
107
+ assistant_icon: Optional[str]
108
+ assistant_type: str
109
+ context: List[Union[ContextItem, str]]
110
+ tools: List[ToolItem]
111
+ conversation_starters: List[str]
112
+
113
+
114
+ class ConversationDetailsData(BaseModel):
115
+ """Extended details about a conversation's configuration and context."""
116
+
117
+ llm_model: Optional[str]
118
+ context: List[ContextItem]
119
+ app_name: Optional[str]
120
+ repo_name: Optional[str]
121
+ index_type: Optional[str]
122
+
123
+
124
+ class AssistantDetailsData(BaseModel):
125
+ """Extended details about an assistant included in a conversation."""
126
+
127
+ assistant_id: str
128
+ assistant_name: str
129
+ assistant_icon: str
130
+ assistant_type: str
131
+ context: List[Union[ContextItem, str]]
132
+ tools: List[ToolItem]
133
+ conversation_starters: List[str]
134
+
135
+
136
+ class ConversationCreateRequest(BaseModel):
137
+ """Model for creating a new conversation."""
138
+
139
+ initial_assistant_id: Optional[str] = None
140
+ folder: Optional[str] = None
141
+ mcp_server_single_usage: Optional[bool] = False
142
+
143
+
144
+ class ConversationDetails(BaseModel):
145
+ """Summary information for a user conversation as returned from list endpoints."""
146
+
147
+ id: str
148
+ date: datetime
149
+ update_date: datetime
150
+ conversation_id: str
151
+ conversation_name: str
152
+ llm_model: Optional[str]
153
+ folder: Optional[str]
154
+ pinned: bool
155
+ history: List[HistoryItem]
156
+ user_id: str
157
+ user_name: str
158
+ assistant_ids: List[str]
159
+ assistant_data: List[AssistantDataItem]
160
+ initial_assistant_id: str
161
+ final_user_mark: Optional[Mark]
162
+ final_operator_mark: Optional[Mark]
163
+ project: str
164
+ conversation_details: Optional[ConversationDetailsData]
165
+ assistant_details: Optional[AssistantDetailsData]
166
+ user_abilities: Optional[List[str]]
167
+ is_folder_migrated: bool
168
+ category: Optional[str]
169
+ mcp_server_single_usage: Optional[bool] = False