codemie-sdk-python 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of codemie-sdk-python might be problematic. Click here for more details.
- codemie_sdk/__init__.py +23 -0
- codemie_sdk/auth/__init__.py +5 -0
- codemie_sdk/auth/credentials.py +112 -0
- codemie_sdk/client/__init__.py +5 -0
- codemie_sdk/client/client.py +107 -0
- codemie_sdk/exceptions.py +45 -0
- codemie_sdk/models/assistant.py +192 -0
- codemie_sdk/models/common.py +39 -0
- codemie_sdk/models/datasource.py +293 -0
- codemie_sdk/models/integration.py +68 -0
- codemie_sdk/models/llm.py +48 -0
- codemie_sdk/models/task.py +44 -0
- codemie_sdk/models/user.py +50 -0
- codemie_sdk/models/workflow.py +86 -0
- codemie_sdk/services/assistant.py +173 -0
- codemie_sdk/services/datasource.py +150 -0
- codemie_sdk/services/integration.py +152 -0
- codemie_sdk/services/llm.py +38 -0
- codemie_sdk/services/task.py +34 -0
- codemie_sdk/services/user.py +34 -0
- codemie_sdk/services/workflow.py +144 -0
- codemie_sdk/services/workflow_execution.py +102 -0
- codemie_sdk/utils/__init__.py +5 -0
- codemie_sdk/utils/http.py +226 -0
- codemie_sdk_python-0.1.1.dist-info/LICENSE +19 -0
- codemie_sdk_python-0.1.1.dist-info/METADATA +120 -0
- codemie_sdk_python-0.1.1.dist-info/RECORD +28 -0
- codemie_sdk_python-0.1.1.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Optional, List
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field, model_validator, ConfigDict, field_validator
|
|
7
|
+
|
|
8
|
+
from .common import TokensUsage, User
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class CodeDataSourceType(str, Enum):
    """Indexing strategy for a code datasource."""

    CODE = "code"
    SUMMARY = "summary"
    CHUNK_SUMMARY = "chunk-summary"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DataSourceType(str, Enum):
    """Kind of datasource, as serialized in the API's ``index_type`` key."""

    CODE = "code"
    CONFLUENCE = "knowledge_base_confluence"
    JIRA = "knowledge_base_jira"
    FILE = "knowledge_base_file"
    # NOTE(review): unlike the other knowledge_base_* values this one is
    # prefixed "llm_routing_" — presumably intentional on the server side;
    # confirm against the API contract.
    GOOGLE = "llm_routing_google"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DataSourceStatus(str, Enum):
    """Lifecycle status of a datasource, derived in ``DataSource.before_init``."""

    COMPLETED = "completed"
    FAILED = "failed"
    FETCHING = "fetching"
    IN_PROGRESS = "in_progress"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class DataSourceProcessingInfo(BaseModel):
    """Indexing/processing statistics for a datasource.

    All fields are optional; the API populates different subsets depending
    on the datasource type.
    """

    model_config = ConfigDict(extra="ignore")

    # API key "total_documents".
    total_documents_count: Optional[int] = Field(None, alias="total_documents")
    # API key "skipped_documents".
    skipped_documents_count: Optional[int] = Field(None, alias="skipped_documents")
    total_size_kb: Optional[float] = None
    average_file_size_bytes: Optional[float] = None
    unique_extensions: Optional[List[str]] = None
    filtered_documents: Optional[int] = None
    # API key "documents_count_key"; also fed from "complete_state" by
    # DataSource.before_init for non-code sources.
    processed_documents_count: Optional[int] = Field(None, alias="documents_count_key")
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
# Base request models
|
|
45
|
+
class Confluence(BaseModel):
    """Model for Confluence-specific response fields"""

    # Confluence Query Language filter selecting the pages to index.
    cql: Optional[str] = None
    include_restricted_content: Optional[bool] = None
    include_archived_content: Optional[bool] = None
    include_attachments: Optional[bool] = None
    include_comments: Optional[bool] = None
    keep_markdown_format: Optional[bool] = None
    keep_newlines: Optional[bool] = None
    # Pagination limits for the Confluence fetch.
    max_pages: Optional[int] = None
    pages_per_request: Optional[int] = None

    model_config = ConfigDict(extra="ignore")
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class Jira(BaseModel):
    """Model for Jira-specific response fields"""

    # Jira Query Language filter selecting the issues to index.
    jql: Optional[str] = None

    model_config = ConfigDict(extra="ignore")
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class Google(BaseModel):
    """Model for Google-specific response fields"""

    # NOTE(review): annotated ``str`` but defaults to None — presumably
    # required in practice; confirm against the API contract.
    google_doc: str = Field(None, alias="googleDoc")

    model_config = ConfigDict(extra="ignore")
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class Code(BaseModel):
    """Model for code repository datasource creation"""

    # NOTE(review): Optional annotation combined with Field(...) still makes
    # these fields required (None is then also accepted as a value) —
    # confirm the intended contract.
    link: Optional[str] = Field(..., min_length=1, max_length=1000)
    branch: Optional[str] = Field(..., min_length=1, max_length=1000)
    index_type: Optional[CodeDataSourceType] = Field(None, alias="indexType")
    # Glob/filter expression limiting which files get indexed.
    files_filter: Optional[str] = Field(default="", alias="filesFilter")
    embeddings_model: Optional[str] = Field(None, alias="embeddingsModel")
    summarization_model: Optional[str] = Field(None, alias="summarizationModel")
    prompt: Optional[str] = None
    # Whether documentation generation is enabled during indexing.
    docs_generation: bool = Field(False, alias="docsGeneration")
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class BaseDataSourceRequest(BaseModel):
    """Base model for all datasource creation requests.

    Subclasses pin ``type`` in ``__init__`` and declare their
    ``required_fields`` so the pre-init validator can enforce that at
    least one of them is supplied.
    """

    # The regex below allows lowercase letters, underscores AND hyphens;
    # the description and error message now say so (previously they
    # omitted hyphens).
    name: str = Field(
        ...,
        description=(
            "Name must contain only lowercase letters, underscores and "
            "hyphens, and must start with a lowercase letter."
        ),
        max_length=25,
    )
    project_name: str
    description: str = Field(..., max_length=100)
    # Serialized as "project_space_visible" in the API payload.
    shared_with_project: bool = Field(False, alias="project_space_visible")
    # NOTE(review): annotated ``str`` but defaults to None — presumably
    # optional; confirm against the API contract.
    setting_id: str = Field(None)
    type: DataSourceType

    model_config = ConfigDict(populate_by_name=True, extra="allow")

    @classmethod
    def required_fields(cls) -> List[str]:
        """Field names of which at least one must be set; overridden by subclasses."""
        return []

    @field_validator("name")
    def validate_name_field(cls, value):
        # fullmatch already anchors the pattern, so the previous ^/$ were
        # redundant.
        if not re.fullmatch(r"[a-z][a-z_-]*", value):
            raise ValueError(
                "Name must contain only lowercase letters, underscores and "
                "hyphens, and cannot begin with '_' or '-'."
            )
        return value

    @model_validator(mode="before")
    def pre_init_validator(cls, values):
        # Ensure that at least one of the subclass-declared fields is not None
        if cls.required_fields() and not any(
            values.get(field) for field in cls.required_fields()
        ):
            raise ValueError(
                f"At least one of the following fields must be set: {', '.join(cls.required_fields())}"
            )
        return values
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class CodeDataSourceRequest(BaseDataSourceRequest, Code):
    """Creation request for a code repository datasource."""

    # Override to comply with keys in json
    project_space_visible: bool = Field(False, alias="projectSpaceVisible")
    setting_id: str = Field(None, alias="settingId")

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.CODE, **data)

    @classmethod
    def required_fields(cls) -> List[str]:
        # Enforced by BaseDataSourceRequest.pre_init_validator: at least
        # one of these must be supplied.
        return ["link", "embeddings_model", "branch", "index_type"]
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class JiraDataSourceRequest(BaseDataSourceRequest, Jira):
    """Creation request for a Jira datasource."""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.JIRA, **data)

    @classmethod
    def required_fields(cls) -> List[str]:
        # A JQL filter is mandatory for Jira sources.
        return ["jql"]
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class ConfluenceDataSourceRequest(BaseDataSourceRequest, Confluence):
    """Creation request for a Confluence datasource."""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.CONFLUENCE, **data)

    @classmethod
    def required_fields(cls) -> List[str]:
        # A CQL filter is mandatory for Confluence sources.
        return ["cql"]
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class GoogleDataSourceRequest(BaseDataSourceRequest, Google):
    """Creation request for a Google doc datasource."""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.GOOGLE, **data)

    @classmethod
    def required_fields(cls) -> List[str]:
        # A Google doc reference is mandatory for Google sources.
        return ["google_doc"]
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
class BaseUpdateDataSourceRequest(BaseDataSourceRequest):
    """Mixin adding update-specific reindex fields.

    ``validate_reindex_options`` rejects reindex flags a given datasource
    type does not support.
    """

    full_reindex: Optional[bool] = Field(None)
    skip_reindex: Optional[bool] = Field(None)
    resume_indexing: Optional[bool] = Field(None)
    incremental_reindex: Optional[bool] = Field(None)

    model_config = ConfigDict(populate_by_name=True, extra="allow")

    @model_validator(mode="after")
    def validate_reindex_options(self) -> "BaseUpdateDataSourceRequest":
        """Reject reindex-flag combinations unsupported by the source type."""
        ds_type = self.type

        if ds_type == DataSourceType.CONFLUENCE:
            if self.incremental_reindex:
                raise ValueError(
                    "Confluence data sources only support full_reindex and resume_indexing"
                )

        elif ds_type == DataSourceType.JIRA:
            if self.resume_indexing:
                raise ValueError(
                    "Jira data sources only support full_reindex and incremental_reindex"
                )

        elif ds_type == DataSourceType.CODE:
            if self.incremental_reindex:
                raise ValueError("Code data sources do not support incremental_reindex")

        elif ds_type == DataSourceType.GOOGLE:
            if self.resume_indexing or self.incremental_reindex:
                raise ValueError("Google data sources only support full_reindex")

        return self
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
class UpdateCodeDataSourceRequest(BaseUpdateDataSourceRequest, Code):
    """Model for code repository datasource updates"""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.CODE, **data)
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class UpdateConfluenceDataSourceRequest(BaseUpdateDataSourceRequest, Confluence):
    """Model for Confluence datasource updates."""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.CONFLUENCE, **data)
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
class UpdateJiraDataSourceRequest(BaseUpdateDataSourceRequest, Jira):
    """Model for Jira datasource updates."""

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.JIRA, **data)
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
class UpdateGoogleDataSourceRequest(BaseUpdateDataSourceRequest):
    """Model for Google docs datasource updates"""

    # Unlike the other Update* requests this one does not mix in the
    # Google model; it redeclares google_doc as optional instead.
    google_doc: Optional[str] = Field(None, alias="googleDoc")

    def __init__(self, **data):
        # Pin the datasource type; callers never pass it explicitly.
        super().__init__(type=DataSourceType.GOOGLE, **data)
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
class DataSource(BaseModel):
    """Read model for a datasource as returned by the API.

    Raw API keys are mapped onto friendlier names via aliases;
    ``before_init`` derives ``status`` and the type-specific sub-objects.
    """

    model_config = ConfigDict(
        extra="ignore",
        populate_by_name=True,
    )

    id: str
    project_name: str
    # API key "repo_name" carries the display name.
    name: str = Field(None, alias="repo_name")
    description: Optional[str] = Field(None)
    # API key "index_type" carries the datasource type.
    type: DataSourceType = Field(None, alias="index_type")
    embeddings_model: Optional[str] = Field(None)
    # Derived in before_init from the raw error/completed/is_fetching flags.
    status: DataSourceStatus
    setting_id: Optional[str] = Field(None)
    created_date: datetime = Field(None, alias="date")
    created_by: User
    shared_with_project: bool = Field(None, alias="project_space_visible")
    update_date: datetime
    # API key "text" holds the failure message, if any.
    error_message: Optional[str] = Field(None, alias="text")
    user_abilities: List[str]
    processing_info: Optional[DataSourceProcessingInfo] = Field(None)
    processed_documents: Optional[List[str]] = Field(None, alias="processed_files")
    tokens_usage: Optional[TokensUsage] = Field(None)
    # Code specific fields
    code: Optional[Code] = None
    # Jira specific fields
    jira: Optional[Jira] = None
    # Confluence specific fields
    confluence: Optional[Confluence] = None
    # Google doc specific fields
    google_doc_link: Optional[str] = None

    @model_validator(mode="before")
    def before_init(cls, values):
        """Derive status and type-specific sub-objects from raw API keys."""
        # Status priority: failed > completed > fetching > in progress.
        if values.get("error"):
            values["status"] = DataSourceStatus.FAILED
        elif values.get("completed"):
            values["status"] = DataSourceStatus.COMPLETED
        elif values.get("is_fetching"):
            values["status"] = DataSourceStatus.FETCHING
        else:
            values["status"] = DataSourceStatus.IN_PROGRESS

        if values.get("index_type") in [
            DataSourceType.CONFLUENCE,
            DataSourceType.JIRA,
            DataSourceType.GOOGLE,
        ]:
            # "complete_state" feeds DataSourceProcessingInfo through its
            # "documents_count_key" alias for non-code sources.
            complete_state = values.get("complete_state", 0)
            if complete_state is not None:
                values["processing_info"] = {"documents_count_key": complete_state}
        elif values.get("index_type") == DataSourceType.CODE:
            # NOTE(review): the "files_filter", "summarization_prompt" and
            # "summarization_docs_generation" keys do not match Code's field
            # names or aliases (filesFilter / prompt / docsGeneration), so
            # pydantic likely drops those values — confirm intended mapping.
            values["code"] = {
                "link": values.get("link"),
                "branch": values.get("branch"),
                "files_filter": values.get("files_filter"),
                "summarization_prompt": values.get("prompt"),
                "summarization_model": values.get("summarization_model"),
                "summarization_docs_generation": values.get("docs_generation"),
            }
        return values
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
"""Models for assistant-related data structures."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from typing import List, Optional, Any
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, Field, ConfigDict
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class CredentialTypes(str, Enum):
    """Enum for credential types.

    Values are the server-side names of the external systems an
    integration can hold credentials for.
    """

    JIRA = "Jira"
    CONFLUENCE = "Confluence"
    GIT = "Git"
    KUBERNETES = "Kubernetes"
    AWS = "AWS"
    GCP = "GCP"
    KEYCLOAK = "Keycloak"
    AZURE = "Azure"
    ELASTIC = "Elastic"
    OPENAPI = "OpenAPI"
    PLUGIN = "Plugin"
    FILESYSTEM = "FileSystem"
    SCHEDULER = "Scheduler"
    WEBHOOK = "Webhook"
    EMAIL = "Email"
    AZURE_DEVOPS = "AzureDevOps"
    SONAR = "Sonar"
    SQL = "SQL"
    TELEGRAM = "Telegram"
    ZEPHYR_CLOUD = "ZephyrCloud"
    ZEPHYR_SQUAD = "ZephyrSquad"
    SERVICE_NOW = "ServiceNow"
    DIAL = "DIAL"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class IntegrationType(str, Enum):
    """Enum for setting types (scope of an integration: per-user or per-project)."""

    USER = "user"
    PROJECT = "project"
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class CredentialValues(BaseModel):
    """Model for credential values (a single key/value credential entry)."""

    model_config = ConfigDict(extra="ignore")

    key: str
    # Value type varies per credential kind, hence Any.
    value: Any
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class Integration(BaseModel):
    """Model for settings configuration."""

    model_config = ConfigDict(extra="ignore")

    id: Optional[str] = None
    # Creation and last-update timestamps.
    date: Optional[datetime] = None
    update_date: Optional[datetime] = None
    user_id: Optional[str] = None
    project_name: str
    # Human-readable label for the integration.
    alias: Optional[str] = None
    # Whether this is the default integration of its credential type.
    default: bool = False
    credential_type: CredentialTypes
    credential_values: List[CredentialValues]
    # Scope of the integration; defaults to per-user.
    setting_type: IntegrationType = Field(default=IntegrationType.USER)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""Models for LLM service."""
|
|
2
|
+
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class LLMProvider(str, Enum):
    """LLM provider options."""

    AZURE_OPENAI = "azure_openai"
    AWS_BEDROCK = "aws_bedrock"
    GOOGLE_VERTEXAI = "google_vertexai"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class CostConfig(BaseModel):
    """Cost configuration for LLM model.

    # NOTE(review): units (per token / per 1K tokens / currency) are not
    # stated anywhere in this module — confirm against the API contract.
    """

    # Cost rate for input (prompt) tokens.
    input: float
    # Cost rate for output (completion) tokens.
    output: float
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class LLMFeatures(BaseModel):
    """Features supported by LLM model.

    All flags default to True, so an omitted features block means
    "everything supported".
    """

    streaming: Optional[bool] = True
    tools: Optional[bool] = True
    temperature: Optional[bool] = True
    parallel_tool_calls: Optional[bool] = True
    system_prompt: Optional[bool] = True
    max_tokens: Optional[bool] = True
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class LLMModel(BaseModel):
    """LLM model configuration.

    Describes a single deployed model: identity, provider, cost and the
    feature set it supports.
    """

    base_name: str
    deployment_name: str
    # Human-readable display name.
    label: Optional[str] = None
    multimodal: Optional[bool] = None
    react_agent: Optional[bool] = None
    enabled: bool
    provider: Optional[LLMProvider] = None
    # Whether this is the default model.
    default: Optional[bool] = None
    cost: Optional[CostConfig] = None
    max_output_tokens: Optional[int] = None
    # default_factory takes the callable directly; the previous
    # `lambda: LLMFeatures()` wrapper was redundant.
    features: Optional[LLMFeatures] = Field(default_factory=LLMFeatures)
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class BackgroundTaskStatus(str, Enum):
    """Enum for background task statuses."""

    STARTED = "STARTED"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class TaskUser(BaseModel):
    """Model representing task user information."""

    model_config = ConfigDict(extra="ignore")

    user_id: str = Field(description="Unique identifier of the user")
    username: str = Field(default="", description="Username of the task owner")
    name: str = Field(default="", description="Display name of the task owner")
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class BackgroundTaskEntity(BaseModel):
    """Model representing a background task."""

    model_config = ConfigDict(extra="ignore")

    id: str = Field(description="Unique identifier of the task")
    task: str = Field(description="Task description or name")
    user: TaskUser = Field(description="Information about the task owner")
    final_output: Optional[str] = Field(
        default="", description="The final result or output of the task"
    )
    current_step: Optional[str] = Field(
        default="", description="Current step or stage of the task"
    )
    status: BackgroundTaskStatus = Field(
        description="Task status (STARTED, COMPLETED, or FAILED)"
    )
    date: datetime = Field(description="Task creation timestamp")
    update_date: datetime = Field(description="Last update timestamp")
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"""Models for user-related data structures."""
|
|
2
|
+
|
|
3
|
+
from typing import List, Optional
|
|
4
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class User(BaseModel):
    """Model representing a user profile.

    Uses camelCase validation aliases to match the API payload while
    exposing snake_case attribute names.
    """

    model_config = ConfigDict(populate_by_name=True, extra="ignore")

    user_id: str = Field(
        description="Unique identifier of the user", validation_alias="userId"
    )
    name: str = Field(description="Full name of the user")
    username: str = Field(description="Username for authentication")
    is_admin: bool = Field(
        description="Whether the user has admin privileges", validation_alias="isAdmin"
    )
    applications: List[str] = Field(
        default_factory=list, description="List of applications the user has access to"
    )
    applications_admin: List[str] = Field(
        default_factory=list,
        description="List of applications where user has admin rights",
        validation_alias="applicationsAdmin",
    )
    picture: str = Field(default="", description="URL to user's profile picture")
    knowledge_bases: List[str] = Field(
        default_factory=list,
        description="List of knowledge bases the user has access to",
        validation_alias="knowledgeBases",
    )
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class UserData(BaseModel):
    """Model representing user data."""

    model_config = ConfigDict(populate_by_name=True, extra="ignore")

    id: Optional[str] = Field(
        default=None, description="Unique identifier of the user data record"
    )
    # Timestamps are kept as raw strings here (unlike datetime elsewhere).
    date: Optional[str] = Field(default=None, description="Creation timestamp")
    update_date: Optional[str] = Field(
        default=None, description="Last update timestamp"
    )
    user_id: Optional[str] = Field(
        default=None, description="Associated user identifier"
    )
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""Workflow models."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from enum import StrEnum, Enum
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
8
|
+
|
|
9
|
+
from codemie_sdk.models.common import User, TokensUsage
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class WorkflowMode(StrEnum):
    """Available workflow modes."""

    SEQUENTIAL = "Sequential"
    AUTONOMOUS = "Autonomous"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class WorkflowCreateRequest(BaseModel):
    """Request model for workflow creation."""

    model_config = ConfigDict(populate_by_name=True)

    project: str = Field(..., min_length=1)
    name: str = Field(..., min_length=1)
    description: Optional[str] = None
    # Workflow definition as a YAML document.
    yaml_config: str = Field(..., min_length=1)
    mode: WorkflowMode = WorkflowMode.SEQUENTIAL
    # Whether the workflow is visible to the whole project.
    shared: bool = False
    icon_url: Optional[str] = None
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class WorkflowUpdateRequest(BaseModel):
    """Request model for workflow updates.

    Unlike creation, ``description`` is required here while ``mode`` and
    ``shared`` become optional (unset means "leave unchanged").
    """

    model_config = ConfigDict(populate_by_name=True)

    project: str = Field(..., min_length=1)
    name: str = Field(..., min_length=1)
    description: str = Field(..., min_length=1)
    yaml_config: str = Field(..., min_length=1)
    mode: Optional[WorkflowMode] = None
    shared: Optional[bool] = None
    icon_url: Optional[str] = None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class Workflow(BaseModel):
    """Workflow template model."""

    model_config = ConfigDict(populate_by_name=True)

    id: Optional[str] = None
    project: str
    name: str
    description: Optional[str] = None
    # Workflow definition as a YAML document.
    yaml_config: Optional[str] = None
    mode: WorkflowMode = WorkflowMode.SEQUENTIAL
    shared: bool = False
    icon_url: Optional[str] = None
    # API key "date" carries the creation timestamp.
    created_date: datetime = Field(None, alias="date")
    update_date: datetime = Field(None)
    created_by: User
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ExecutionStatus(str, Enum):
    """Overall status of a workflow execution (API key "overall_status")."""

    IN_PROGRESS = "In Progress"
    NOT_STARTED = "Not Started"
    INTERRUPTED = "Interrupted"
    FAILED = "Failed"
    SUCCEEDED = "Succeeded"
    ABORTED = "Aborted"
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class WorkflowExecution(BaseModel):
    """Model representing a workflow execution."""

    model_config = ConfigDict(populate_by_name=True)

    id: str
    execution_id: str
    workflow_id: str
    # API key "overall_status".
    status: ExecutionStatus = Field(alias="overall_status")
    # API key "date" carries the creation timestamp.
    created_date: datetime = Field(alias="date")
    # Prompt the execution was started with.
    prompt: str
    updated_date: Optional[datetime] = None
    created_by: User
    tokens_usage: Optional[TokensUsage] = None
|