seekrai 0.2.2__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {seekrai-0.2.2 → seekrai-0.3.0}/PKG-INFO +3 -1
  2. {seekrai-0.2.2 → seekrai-0.3.0}/pyproject.toml +3 -3
  3. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/client.py +8 -0
  4. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/__init__.py +6 -0
  5. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/chat/completions.py +14 -6
  6. seekrai-0.3.0/src/seekrai/resources/deployments.py +203 -0
  7. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/finetune.py +8 -2
  8. seekrai-0.3.0/src/seekrai/resources/projects.py +129 -0
  9. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/__init__.py +26 -0
  10. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/alignment.py +2 -0
  11. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/chat_completions.py +7 -5
  12. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/common.py +16 -2
  13. seekrai-0.3.0/src/seekrai/types/deployments.py +69 -0
  14. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/finetune.py +5 -1
  15. seekrai-0.3.0/src/seekrai/types/projects.py +31 -0
  16. {seekrai-0.2.2 → seekrai-0.3.0}/LICENSE +0 -0
  17. {seekrai-0.2.2 → seekrai-0.3.0}/README.md +0 -0
  18. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/__init__.py +0 -0
  19. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/abstract/__init__.py +0 -0
  20. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/abstract/api_requestor.py +0 -0
  21. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/constants.py +0 -0
  22. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/error.py +0 -0
  23. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/filemanager.py +0 -0
  24. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/alignment.py +0 -0
  25. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/chat/__init__.py +0 -0
  26. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/completions.py +0 -0
  27. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/embeddings.py +0 -0
  28. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/files.py +0 -0
  29. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/images.py +0 -0
  30. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/resources/models.py +0 -0
  31. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/seekrflow_response.py +0 -0
  32. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/abstract.py +0 -0
  33. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/completions.py +0 -0
  34. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/embeddings.py +0 -0
  35. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/error.py +0 -0
  36. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/files.py +0 -0
  37. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/images.py +0 -0
  38. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/types/models.py +0 -0
  39. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/utils/__init__.py +0 -0
  40. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/utils/_log.py +0 -0
  41. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/utils/api_helpers.py +0 -0
  42. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/utils/files.py +0 -0
  43. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/utils/tools.py +0 -0
  44. {seekrai-0.2.2 → seekrai-0.3.0}/src/seekrai/version.py +0 -0
@@ -1,7 +1,8 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: seekrai
3
- Version: 0.2.2
3
+ Version: 0.3.0
4
4
  Summary: Python client for SeekrAI
5
+ Home-page: https://gitlab.cb.ntent.com/ml/seekr-py
5
6
  License: Apache-2.0
6
7
  Author: SeekrFlow
7
8
  Author-email: support@seekr.com
@@ -27,6 +28,7 @@ Requires-Dist: tabulate (>=0.9.0,<0.10.0)
27
28
  Requires-Dist: tqdm (>=4.66.2,<5.0.0)
28
29
  Requires-Dist: typer (>=0.9,<0.13)
29
30
  Project-URL: Homepage, https://www.seekr.com/
31
+ Project-URL: Repository, https://gitlab.cb.ntent.com/ml/seekr-py
30
32
  Description-Content-Type: text/markdown
31
33
 
32
34
  The Seekr Python Library is the official Python client for SeekrFlow's API platform, providing a convenient way to interact with the REST APIs and enabling easy integration with Python 3.8+ applications, with easy-to-use synchronous and asynchronous clients.
@@ -14,7 +14,7 @@ build-backend = "poetry.core.masonry.api"
14
14
 
15
15
  [tool.poetry]
16
16
  name = "seekrai"
17
- version = "0.2.2"
17
+ version = "0.3.0"
18
18
  authors = [
19
19
  "SeekrFlow <support@seekr.com>"
20
20
  ]
@@ -26,8 +26,8 @@ classifiers = [
26
26
  "License :: OSI Approved :: Apache Software License",
27
27
  "Operating System :: POSIX :: Linux",
28
28
  ]
29
- #repository = "https://gitlab.cb.ntent.com/ml/seekr-py"
30
- #homepage = "https://gitlab.cb.ntent.com/ml/seekr-py"
29
+ repository = "https://gitlab.cb.ntent.com/ml/seekr-py"
30
+ homepage = "https://gitlab.cb.ntent.com/ml/seekr-py"
31
31
 
32
32
 
33
33
  [tool.poetry.dependencies]
@@ -19,6 +19,8 @@ class SeekrFlow:
19
19
  models: resources.Models
20
20
  fine_tuning: resources.FineTuning
21
21
  alignment: resources.Alignment
22
+ projects: resources.Projects
23
+ deployments: resources.Deployments
22
24
 
23
25
  # client options
24
26
  client: SeekrFlowClient
@@ -79,6 +81,8 @@ class SeekrFlow:
79
81
  self.models = resources.Models(self.client)
80
82
  self.fine_tuning = resources.FineTuning(self.client)
81
83
  self.alignment = resources.Alignment(self.client)
84
+ self.projects = resources.Projects(self.client)
85
+ self.deployments = resources.Deployments(self.client)
82
86
 
83
87
 
84
88
  class AsyncSeekrFlow:
@@ -90,6 +94,8 @@ class AsyncSeekrFlow:
90
94
  models: resources.AsyncModels
91
95
  fine_tuning: resources.AsyncFineTuning
92
96
  alignment: resources.AsyncAlignment
97
+ projects: resources.AsyncProjects
98
+ deployments: resources.AsyncDeployments
93
99
 
94
100
  # client options
95
101
  client: SeekrFlowClient
@@ -150,6 +156,8 @@ class AsyncSeekrFlow:
150
156
  self.models = resources.AsyncModels(self.client)
151
157
  self.fine_tuning = resources.AsyncFineTuning(self.client)
152
158
  self.alignment = resources.AsyncAlignment(self.client)
159
+ self.projects = resources.AsyncProjects(self.client)
160
+ self.deployments = resources.AsyncDeployments(self.client)
153
161
 
154
162
 
155
163
  Client = SeekrFlow
@@ -1,11 +1,13 @@
1
1
  from seekrai.resources.alignment import Alignment, AsyncAlignment
2
2
  from seekrai.resources.chat import AsyncChat, Chat
3
3
  from seekrai.resources.completions import AsyncCompletions, Completions
4
+ from seekrai.resources.deployments import AsyncDeployments, Deployments
4
5
  from seekrai.resources.embeddings import AsyncEmbeddings, Embeddings
5
6
  from seekrai.resources.files import AsyncFiles, Files
6
7
  from seekrai.resources.finetune import AsyncFineTuning, FineTuning
7
8
  from seekrai.resources.images import AsyncImages, Images
8
9
  from seekrai.resources.models import AsyncModels, Models
10
+ from seekrai.resources.projects import AsyncProjects, Projects
9
11
 
10
12
 
11
13
  __all__ = [
@@ -25,4 +27,8 @@ __all__ = [
25
27
  "Images",
26
28
  "AsyncModels",
27
29
  "Models",
30
+ "AsyncProjects",
31
+ "Projects",
32
+ "AsyncDeployments",
33
+ "Deployments",
28
34
  ]
@@ -29,7 +29,8 @@ class ChatCompletions:
29
29
  top_k: int = 5,
30
30
  repetition_penalty: float = 1,
31
31
  stream: bool = False,
32
- logprobs: int = 0,
32
+ logprobs: bool | None = False,
33
+ top_logprobs: int | None = 0,
33
34
  echo: bool = False,
34
35
  n: int = 1,
35
36
  safety_model: str | None = None,
@@ -61,8 +62,10 @@ class ChatCompletions:
61
62
  Defaults to None.
62
63
  stream (bool, optional): Flag indicating whether to stream the generated completions.
63
64
  Defaults to False.
64
- logprobs (int, optional): Number of top-k logprobs to return
65
- Defaults to None.
65
+ logprobs (bool, optional): Whether to return log probabilities of the output tokens.
66
+ Defaults to False.
67
+ top_logprobs (int, optional): Number of most likely tokens to return the log probabilities for.
68
+ If greater than 0, it implies logprobs=True. Defaults to 0.
66
69
  echo (bool, optional): Echo prompt in output. Can be used with logprobs to return prompt logprobs.
67
70
  Defaults to None.
68
71
  n (int, optional): Number of completions to generate. Setting to None will return a single generation.
@@ -102,6 +105,7 @@ class ChatCompletions:
102
105
  repetition_penalty=repetition_penalty,
103
106
  stream=stream,
104
107
  logprobs=logprobs,
108
+ top_logprobs=top_logprobs,
105
109
  echo=echo,
106
110
  n=n,
107
111
  safety_model=safety_model,
@@ -143,7 +147,8 @@ class AsyncChatCompletions:
143
147
  top_k: int = 5,
144
148
  repetition_penalty: float = 1,
145
149
  stream: bool = False,
146
- logprobs: int = 0,
150
+ logprobs: bool | None = False,
151
+ top_logprobs: int | None = 0,
147
152
  echo: bool = False,
148
153
  n: int = 1,
149
154
  safety_model: str | None = None,
@@ -175,8 +180,10 @@ class AsyncChatCompletions:
175
180
  Defaults to None.
176
181
  stream (bool, optional): Flag indicating whether to stream the generated completions.
177
182
  Defaults to False.
178
- logprobs (int, optional): Number of top-k logprobs to return
179
- Defaults to None.
183
+ logprobs (bool, optional): Whether to return log probabilities of the output tokens.
184
+ Defaults to False.
185
+ top_logprobs (int, optional): Number of most likely tokens to return the log probabilities for.
186
+ If greater than 0, it implies logprobs=True. Defaults to 0.
180
187
  echo (bool, optional): Echo prompt in output. Can be used with logprobs to return prompt logprobs.
181
188
  Defaults to None.
182
189
  n (int, optional): Number of completions to generate. Setting to None will return a single generation.
@@ -216,6 +223,7 @@ class AsyncChatCompletions:
216
223
  repetition_penalty=repetition_penalty,
217
224
  stream=stream,
218
225
  logprobs=logprobs,
226
+ top_logprobs=top_logprobs,
219
227
  echo=echo,
220
228
  n=n,
221
229
  safety_model=safety_model,
@@ -0,0 +1,203 @@
1
+ from seekrai.abstract import api_requestor
2
+ from seekrai.seekrflow_response import SeekrFlowResponse
3
+ from seekrai.types import SeekrFlowClient, SeekrFlowRequest
4
+ from seekrai.types.deployments import Deployment as DeploymentSchema
5
+ from seekrai.types.deployments import GetDeploymentsResponse
6
+
7
+
8
+ class Deployments:
9
+ def __init__(self, client: SeekrFlowClient) -> None:
10
+ self._client = client
11
+
12
+ def list(self) -> GetDeploymentsResponse:
13
+ requestor = api_requestor.APIRequestor(
14
+ client=self._client,
15
+ )
16
+
17
+ response, _, _ = requestor.request(
18
+ options=SeekrFlowRequest(
19
+ method="GET",
20
+ url="flow/deployments",
21
+ ),
22
+ stream=False,
23
+ )
24
+
25
+ assert isinstance(response, SeekrFlowResponse)
26
+ return GetDeploymentsResponse(**response.data)
27
+
28
+ def retrieve(self, deployment_id: str) -> DeploymentSchema:
29
+ requestor = api_requestor.APIRequestor(
30
+ client=self._client,
31
+ )
32
+
33
+ response, _, _ = requestor.request(
34
+ options=SeekrFlowRequest(
35
+ method="GET",
36
+ url=f"flow/deployments/{deployment_id}",
37
+ ),
38
+ stream=False,
39
+ )
40
+
41
+ assert isinstance(response, SeekrFlowResponse)
42
+ return DeploymentSchema(**response.data)
43
+
44
+ def create(
45
+ self,
46
+ name: str,
47
+ description: str,
48
+ model_type: str,
49
+ model_id: str,
50
+ n_instances: int,
51
+ ) -> DeploymentSchema:
52
+ requestor = api_requestor.APIRequestor(
53
+ client=self._client,
54
+ )
55
+
56
+ response, _, _ = requestor.request(
57
+ options=SeekrFlowRequest(
58
+ method="POST",
59
+ url="flow/deployments",
60
+ params={
61
+ "name": name,
62
+ "description": description,
63
+ "model_type": model_type,
64
+ "model_id": model_id,
65
+ "n_instances": n_instances,
66
+ },
67
+ ),
68
+ stream=False,
69
+ )
70
+
71
+ assert isinstance(response, SeekrFlowResponse)
72
+ return DeploymentSchema(**response.data)
73
+
74
+ def promote(self, deployment_id: str) -> DeploymentSchema:
75
+ requestor = api_requestor.APIRequestor(
76
+ client=self._client,
77
+ )
78
+
79
+ response, _, _ = requestor.request(
80
+ options=SeekrFlowRequest(
81
+ method="PUT",
82
+ url=f"flow/deployments/{deployment_id}/promote",
83
+ ),
84
+ stream=False,
85
+ )
86
+
87
+ assert isinstance(response, SeekrFlowResponse)
88
+ return DeploymentSchema(**response.data)
89
+
90
+ def demote(self, deployment_id: str) -> DeploymentSchema:
91
+ requestor = api_requestor.APIRequestor(
92
+ client=self._client,
93
+ )
94
+
95
+ response, _, _ = requestor.request(
96
+ options=SeekrFlowRequest(
97
+ method="PUT",
98
+ url=f"flow/deployments/{deployment_id}/demote",
99
+ ),
100
+ stream=False,
101
+ )
102
+
103
+ assert isinstance(response, SeekrFlowResponse)
104
+ return DeploymentSchema(**response.data)
105
+
106
+
107
+ class AsyncDeployments:
108
+ def __init__(self, client: SeekrFlowClient) -> None:
109
+ self._client = client
110
+
111
+ async def list(self) -> GetDeploymentsResponse:
112
+ requestor = api_requestor.APIRequestor(
113
+ client=self._client,
114
+ )
115
+
116
+ response, _, _ = await requestor.arequest(
117
+ options=SeekrFlowRequest(
118
+ method="GET",
119
+ url="flow/deployments",
120
+ ),
121
+ stream=False,
122
+ )
123
+
124
+ assert isinstance(response, SeekrFlowResponse)
125
+ return GetDeploymentsResponse(**response.data)
126
+
127
+ async def retrieve(self, deployment_id: str) -> DeploymentSchema:
128
+ requestor = api_requestor.APIRequestor(
129
+ client=self._client,
130
+ )
131
+
132
+ response, _, _ = await requestor.arequest(
133
+ options=SeekrFlowRequest(
134
+ method="GET",
135
+ url=f"flow/deployments/{deployment_id}",
136
+ ),
137
+ stream=False,
138
+ )
139
+
140
+ assert isinstance(response, SeekrFlowResponse)
141
+ return DeploymentSchema(**response.data)
142
+
143
+ async def create(
144
+ self,
145
+ name: str,
146
+ description: str,
147
+ model_type: str,
148
+ model_id: str,
149
+ n_instances: int,
150
+ ) -> DeploymentSchema:
151
+ requestor = api_requestor.APIRequestor(
152
+ client=self._client,
153
+ )
154
+
155
+ response, _, _ = await requestor.arequest(
156
+ options=SeekrFlowRequest(
157
+ method="POST",
158
+ url="flow/deployments",
159
+ params={
160
+ "name": name,
161
+ "description": description,
162
+ "model_type": model_type,
163
+ "model_id": model_id,
164
+ "n_instances": n_instances,
165
+ },
166
+ ),
167
+ stream=False,
168
+ )
169
+
170
+ assert isinstance(response, SeekrFlowResponse)
171
+ return DeploymentSchema(**response.data)
172
+
173
+ async def promote(self, deployment_id: str) -> DeploymentSchema:
174
+ requestor = api_requestor.APIRequestor(
175
+ client=self._client,
176
+ )
177
+
178
+ response, _, _ = await requestor.arequest(
179
+ options=SeekrFlowRequest(
180
+ method="PUT",
181
+ url=f"flow/deployments/{deployment_id}/promote",
182
+ ),
183
+ stream=False,
184
+ )
185
+
186
+ assert isinstance(response, SeekrFlowResponse)
187
+ return DeploymentSchema(**response.data)
188
+
189
+ async def demote(self, deployment_id: str) -> DeploymentSchema:
190
+ requestor = api_requestor.APIRequestor(
191
+ client=self._client,
192
+ )
193
+
194
+ response, _, _ = await requestor.arequest(
195
+ options=SeekrFlowRequest(
196
+ method="PUT",
197
+ url=f"flow/deployments/{deployment_id}/demote",
198
+ ),
199
+ stream=False,
200
+ )
201
+
202
+ assert isinstance(response, SeekrFlowResponse)
203
+ return DeploymentSchema(**response.data)
@@ -24,6 +24,7 @@ class FineTuning:
24
24
  def create(
25
25
  self,
26
26
  *,
27
+ project_id: int,
27
28
  training_config: TrainingConfig,
28
29
  infrastructure_config: InfrastructureConfig,
29
30
  # wandb_api_key: str | None = None,
@@ -42,7 +43,9 @@ class FineTuning:
42
43
  )
43
44
 
44
45
  parameter_payload = FinetuneRequest(
45
- training_config=training_config, infrastructure_config=infrastructure_config
46
+ project_id=project_id,
47
+ training_config=training_config,
48
+ infrastructure_config=infrastructure_config,
46
49
  ).model_dump()
47
50
 
48
51
  response, _, _ = requestor.request(
@@ -250,6 +253,7 @@ class AsyncFineTuning:
250
253
  async def create(
251
254
  self,
252
255
  *,
256
+ project_id: int,
253
257
  training_config: TrainingConfig,
254
258
  infrastructure_config: InfrastructureConfig,
255
259
  ) -> FinetuneResponse:
@@ -266,7 +270,9 @@ class AsyncFineTuning:
266
270
  )
267
271
 
268
272
  parameter_payload = FinetuneRequest(
269
- training_config=training_config, infrastructure_config=infrastructure_config
273
+ project_id=project_id,
274
+ training_config=training_config,
275
+ infrastructure_config=infrastructure_config,
270
276
  ).model_dump()
271
277
 
272
278
  response, _, _ = await requestor.arequest(
@@ -0,0 +1,129 @@
1
+ from seekrai.abstract import api_requestor
2
+ from seekrai.seekrflow_response import SeekrFlowResponse
3
+ from seekrai.types import SeekrFlowClient, SeekrFlowRequest
4
+ from seekrai.types.projects import (
5
+ GetProjectsResponse,
6
+ PostProjectRequest,
7
+ )
8
+ from seekrai.types.projects import (
9
+ Project as ProjectSchema,
10
+ )
11
+
12
+
13
+ class Projects:
14
+ def __init__(self, client: SeekrFlowClient) -> None:
15
+ self._client = client
16
+
17
+ def list(self, skip: int = 0, limit: int = 100) -> GetProjectsResponse:
18
+ requestor = api_requestor.APIRequestor(
19
+ client=self._client,
20
+ )
21
+
22
+ response, _, _ = requestor.request(
23
+ options=SeekrFlowRequest(
24
+ method="GET",
25
+ url="flow/projects",
26
+ params={"skip": skip, "limit": limit},
27
+ ),
28
+ stream=False,
29
+ )
30
+
31
+ assert isinstance(response, SeekrFlowResponse)
32
+ return GetProjectsResponse(**response.data)
33
+
34
+ def retrieve(self, project_id: int) -> ProjectSchema:
35
+ requestor = api_requestor.APIRequestor(
36
+ client=self._client,
37
+ )
38
+
39
+ response, _, _ = requestor.request(
40
+ options=SeekrFlowRequest(
41
+ method="GET",
42
+ url=f"flow/projects/{project_id}",
43
+ ),
44
+ stream=False,
45
+ )
46
+
47
+ assert isinstance(response, SeekrFlowResponse)
48
+ return ProjectSchema(**response.data)
49
+
50
+ def create(self, name: str, description: str) -> ProjectSchema:
51
+ requestor = api_requestor.APIRequestor(
52
+ client=self._client,
53
+ )
54
+
55
+ parameter_payload = PostProjectRequest(
56
+ name=name,
57
+ description=description,
58
+ ).model_dump()
59
+
60
+ response, _, _ = requestor.request(
61
+ options=SeekrFlowRequest(
62
+ method="POST",
63
+ url="flow/projects",
64
+ params=parameter_payload,
65
+ ),
66
+ stream=False,
67
+ )
68
+
69
+ assert isinstance(response, SeekrFlowResponse)
70
+ return ProjectSchema(**response.data)
71
+
72
+
73
+ class AsyncProjects:
74
+ def __init__(self, client: SeekrFlowClient) -> None:
75
+ self._client = client
76
+
77
+ async def list(self, skip: int = 0, limit: int = 100) -> GetProjectsResponse:
78
+ requestor = api_requestor.APIRequestor(
79
+ client=self._client,
80
+ )
81
+
82
+ response, _, _ = await requestor.arequest(
83
+ options=SeekrFlowRequest(
84
+ method="GET",
85
+ url="flow/projects",
86
+ ),
87
+ stream=False,
88
+ )
89
+
90
+ assert isinstance(response, SeekrFlowResponse)
91
+ return GetProjectsResponse(**response.data)
92
+
93
+ async def retrieve(self, project_id: int) -> ProjectSchema:
94
+ requestor = api_requestor.APIRequestor(
95
+ client=self._client,
96
+ )
97
+
98
+ response, _, _ = await requestor.arequest(
99
+ options=SeekrFlowRequest(
100
+ method="GET",
101
+ url=f"flow/projects/{project_id}",
102
+ ),
103
+ stream=False,
104
+ )
105
+
106
+ assert isinstance(response, SeekrFlowResponse)
107
+ return ProjectSchema(**response.data)
108
+
109
+ async def create(self, name: str, description: str) -> ProjectSchema:
110
+ requestor = api_requestor.APIRequestor(
111
+ client=self._client,
112
+ )
113
+
114
+ parameter_payload = PostProjectRequest(
115
+ name=name,
116
+ description=description,
117
+ ).model_dump()
118
+
119
+ response, _, _ = await requestor.arequest(
120
+ options=SeekrFlowRequest(
121
+ method="POST",
122
+ url="flow/projects",
123
+ params=parameter_payload,
124
+ ),
125
+ stream=False,
126
+ )
127
+
128
+ assert isinstance(response, SeekrFlowResponse)
129
+ return ProjectSchema(**response.data)
@@ -16,6 +16,15 @@ from seekrai.types.completions import (
16
16
  CompletionRequest,
17
17
  CompletionResponse,
18
18
  )
19
+ from seekrai.types.deployments import (
20
+ Deployment,
21
+ DeploymentProcessor,
22
+ DeploymentStatus,
23
+ DeploymentType,
24
+ GetDeploymentsResponse,
25
+ HardwareType,
26
+ NewDeploymentRequest,
27
+ )
19
28
  from seekrai.types.embeddings import EmbeddingRequest, EmbeddingResponse
20
29
  from seekrai.types.files import (
21
30
  FileDeleteResponse,
@@ -40,6 +49,12 @@ from seekrai.types.images import (
40
49
  ImageResponse,
41
50
  )
42
51
  from seekrai.types.models import ModelList, ModelResponse
52
+ from seekrai.types.projects import (
53
+ GetProjectsResponse,
54
+ PostProjectRequest,
55
+ Project,
56
+ ProjectWithRuns,
57
+ )
43
58
 
44
59
 
45
60
  __all__ = [
@@ -75,4 +90,15 @@ __all__ = [
75
90
  "AlignmentResponse",
76
91
  "AlignmentJobStatus",
77
92
  "AlignmentList",
93
+ "Project",
94
+ "ProjectWithRuns",
95
+ "GetProjectsResponse",
96
+ "PostProjectRequest",
97
+ "Deployment",
98
+ "DeploymentProcessor",
99
+ "DeploymentStatus",
100
+ "DeploymentType",
101
+ "GetDeploymentsResponse",
102
+ "HardwareType",
103
+ "NewDeploymentRequest",
78
104
  ]
@@ -1,3 +1,5 @@
1
+ from __future__ import annotations
2
+
1
3
  from datetime import datetime
2
4
  from enum import Enum
3
5
  from typing import List, Literal, Optional
@@ -8,8 +8,9 @@ from pydantic import Field
8
8
  from seekrai.types.abstract import BaseModel
9
9
  from seekrai.types.common import (
10
10
  DeltaContent,
11
+ DeltaLogProbs,
11
12
  FinishReason,
12
- LogprobsPart,
13
+ LogProbs,
13
14
  ObjectType,
14
15
  PromptPart,
15
16
  UsageData,
@@ -89,9 +90,10 @@ class ChatCompletionRequest(BaseModel):
89
90
  # stream SSE token chunks
90
91
  stream: bool = False
91
92
  # return logprobs
92
- logprobs: int = 0
93
+ logprobs: bool | None = False
94
+ top_logprobs: int | None = 0
93
95
  # echo prompt.
94
- # can be used with logprobs to return prompt logprobs
96
+ # can be used with logprobs to return prompt logprobs (is this supported in Seekr API/worker implementation?)
95
97
  echo: bool = False
96
98
  # number of output generations
97
99
  n: int = 1
@@ -105,9 +107,9 @@ class ChatCompletionRequest(BaseModel):
105
107
 
106
108
  class ChatCompletionChoicesData(BaseModel):
107
109
  index: int | None = None
108
- logprobs: LogprobsPart | None = None
109
110
  finish_reason: FinishReason | None = None
110
111
  message: ChatCompletionMessage | None = None
112
+ logprobs: LogProbs | None = None
111
113
 
112
114
 
113
115
  class ChatCompletionResponse(BaseModel):
@@ -129,9 +131,9 @@ class ChatCompletionResponse(BaseModel):
129
131
 
130
132
  class ChatCompletionChoicesChunk(BaseModel):
131
133
  index: int | None = None
132
- logprobs: float | None = None
133
134
  finish_reason: FinishReason | None = None
134
135
  delta: DeltaContent | None = None
136
+ logprobs: DeltaLogProbs | None = None
135
137
 
136
138
 
137
139
  class ChatCompletionChunk(BaseModel):
@@ -1,9 +1,9 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from enum import Enum
4
- from typing import Any, Dict, List
4
+ from typing import Any, Dict, List, Optional
5
5
 
6
- from pydantic import ConfigDict
6
+ from pydantic import ConfigDict, Field
7
7
 
8
8
  from seekrai.types.abstract import BaseModel
9
9
 
@@ -33,6 +33,13 @@ class ObjectType(str, Enum):
33
33
  Model = "model"
34
34
 
35
35
 
36
+ class LogProbs(BaseModel): # OpenAI style
37
+ text_offset: List[int] = Field(default_factory=list)
38
+ token_logprobs: List[Optional[float]] = Field(default_factory=list)
39
+ tokens: List[str] = Field(default_factory=list)
40
+ top_logprobs: List[Optional[Dict[str, float]]] = Field(default_factory=list)
41
+
42
+
36
43
  class LogprobsPart(BaseModel):
37
44
  # token list
38
45
  tokens: List[str | None] | None = None
@@ -51,6 +58,13 @@ class DeltaContent(BaseModel):
51
58
  content: str | None = None
52
59
 
53
60
 
61
+ class DeltaLogProbs(BaseModel):
62
+ text_offset: int | None = None
63
+ token_logprob: float | None = None
64
+ token: str | None = None
65
+ top_logprobs: Dict[str, float] | None = None
66
+
67
+
54
68
  class SeekrFlowRequest(BaseModel):
55
69
  model_config = ConfigDict(arbitrary_types_allowed=True)
56
70
 
@@ -0,0 +1,69 @@
1
+ import enum
2
+ from datetime import datetime
3
+ from typing import Optional
4
+
5
+ from pydantic import BaseModel, ConfigDict, Field
6
+
7
+
8
+ class DeploymentType(str, enum.Enum):
9
+ # Matches UI
10
+ FINE_TUNED_RUN = "Fine-tuned Run"
11
+ BASE_MODEL = "Base Model" # TODO - clean up spacing, capital, etc.
12
+
13
+
14
+ class DeploymentStatus(str, enum.Enum):
15
+ # dedicated
16
+ INACTIVE = "Inactive" # Shared with serverless.
17
+ PENDING = "Pending"
18
+ ACTIVE = "Active" # Shared with serverless.
19
+ FAILED = "Failed"
20
+ STARTED = "Started"
21
+ SUCCESS = "Success"
22
+
23
+
24
+ class HardwareType(str, enum.Enum):
25
+ # Matches UI
26
+ SERVERLESS = "Serverless"
27
+ DEDICATED = "Dedicated"
28
+
29
+
30
+ class DeploymentProcessor(str, enum.Enum):
31
+ GAUDI2 = "GAUDI2"
32
+ GAUDI3 = "GAUDI3"
33
+ A100 = "A100"
34
+ H100 = "H100"
35
+ XEON = "XEON"
36
+ NVIDIA = "NVIDIA" # TODO - this doesnt make sense with A100, etc.
37
+
38
+
39
+ class NewDeploymentRequest(BaseModel):
40
+ model_type: DeploymentType
41
+ model_id: str
42
+ name: str = Field(min_length=5, max_length=100)
43
+ description: str = Field(min_length=5, max_length=1000)
44
+ n_instances: int = Field(..., ge=1, le=50)
45
+
46
+
47
+ class Deployment(BaseModel):
48
+ model_config = ConfigDict(from_attributes=True)
49
+
50
+ id: str
51
+ model_type: DeploymentType
52
+ model_id: str
53
+ name: str
54
+ description: str
55
+ status: DeploymentStatus
56
+ memory: Optional[str] = None
57
+ hardware_type: HardwareType = HardwareType.DEDICATED
58
+ total_input_tokens: int
59
+ total_output_tokens: int
60
+ created_at: datetime
61
+ last_deployed_at: Optional[datetime] = None
62
+ updated_at: datetime
63
+ processor: DeploymentProcessor = DeploymentProcessor.GAUDI2
64
+ n_instances: int
65
+ user_id: int
66
+
67
+
68
+ class GetDeploymentsResponse(BaseModel):
69
+ data: list[Deployment]
@@ -2,7 +2,7 @@ from __future__ import annotations
2
2
 
3
3
  from datetime import datetime
4
4
  from enum import Enum
5
- from typing import List, Literal
5
+ from typing import List, Literal, Optional
6
6
 
7
7
  from seekrai.types.abstract import BaseModel
8
8
  from seekrai.types.common import (
@@ -118,11 +118,13 @@ class InfrastructureConfig(BaseModel):
118
118
  accel_type: AcceleratorType
119
119
  n_accel: int
120
120
 
121
+
121
122
  class FinetuneRequest(BaseModel):
122
123
  """
123
124
  Fine-tune request type
124
125
  """
125
126
 
127
+ project_id: int
126
128
  training_config: TrainingConfig
127
129
  infrastructure_config: InfrastructureConfig
128
130
 
@@ -168,6 +170,8 @@ class FinetuneResponse(BaseModel):
168
170
  # list of fine-tune events
169
171
  events: List[FinetuneEvent] | None = None
170
172
  inference_available: bool = False
173
+ project_id: Optional[int] = None # TODO - fix this
174
+
171
175
  # dataset token count
172
176
  # TODO
173
177
  # token_count: int | None = None
@@ -0,0 +1,31 @@
1
+ from datetime import datetime
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel, ConfigDict, Field
5
+
6
+
7
+ class Project(BaseModel):
8
+ model_config = ConfigDict(from_attributes=True)
9
+
10
+ id: int
11
+ name: str
12
+ description: str
13
+ user_id: int
14
+ created_at: datetime
15
+ updated_at: datetime
16
+
17
+
18
+ class ProjectWithRuns(Project):
19
+ runs: int
20
+ runs_deployed: int
21
+ last_modified: datetime
22
+
23
+
24
+ class GetProjectsResponse(BaseModel):
25
+ data: list[ProjectWithRuns]
26
+
27
+
28
+ class PostProjectRequest(BaseModel):
29
+ id: Optional[int] = Field(default=None)
30
+ name: str = Field(min_length=5, max_length=100)
31
+ description: str = Field(min_length=5, max_length=1000)
File without changes
File without changes
File without changes
File without changes
File without changes