adaptive-sdk 0.1.13__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adaptive_sdk/client.py +2 -0
- adaptive_sdk/graphql_client/__init__.py +6 -4
- adaptive_sdk/graphql_client/add_model_to_use_case.py +6 -0
- adaptive_sdk/graphql_client/async_client.py +35 -19
- adaptive_sdk/graphql_client/client.py +35 -19
- adaptive_sdk/graphql_client/custom_fields.py +20 -4
- adaptive_sdk/graphql_client/custom_mutations.py +29 -8
- adaptive_sdk/graphql_client/deploy_model.py +12 -0
- adaptive_sdk/graphql_client/enums.py +3 -20
- adaptive_sdk/graphql_client/fragments.py +4 -4
- adaptive_sdk/graphql_client/input_types.py +157 -18
- adaptive_sdk/graphql_client/remove_model_from_use_case.py +6 -0
- adaptive_sdk/input_types/typed_dicts.py +14 -15
- adaptive_sdk/resources/__init__.py +3 -0
- adaptive_sdk/resources/artifacts.py +61 -0
- adaptive_sdk/resources/chat.py +11 -9
- adaptive_sdk/resources/interactions.py +57 -25
- adaptive_sdk/resources/models.py +86 -95
- adaptive_sdk/rest/rest_types.py +2 -1
- {adaptive_sdk-0.1.13.dist-info → adaptive_sdk-0.1.14.dist-info}/METADATA +4 -1
- {adaptive_sdk-0.1.13.dist-info → adaptive_sdk-0.1.14.dist-info}/RECORD +22 -19
- adaptive_sdk/graphql_client/attach_model_to_use_case.py +0 -12
- {adaptive_sdk-0.1.13.dist-info → adaptive_sdk-0.1.14.dist-info}/WHEEL +0 -0
adaptive_sdk/resources/interactions.py
CHANGED

@@ -1,26 +1,27 @@
 from __future__ import annotations
-
-import
-from typing import Dict, List, Literal, TYPE_CHECKING
+
+from typing import TYPE_CHECKING, Any, Dict, List, Literal
 from uuid import UUID

+import humps
+from typing_extensions import override
+
 from adaptive_sdk import input_types
 from adaptive_sdk.error_handling import rest_error_handler
 from adaptive_sdk.graphql_client import (
+    CompletionData,
     CompletionGroupBy,
     CursorPageInput,
     ListCompletionsFilterInput,
-    OrderPair,
-    ListInteractionsCompletions,
     ListGroupedInteractionsCompletionsGrouped,
-
+    ListInteractionsCompletions,
+    OrderPair,
 )
 from adaptive_sdk.graphql_client.base_model import UNSET
 from adaptive_sdk.rest import rest_types
 from adaptive_sdk.utils import convert_optional_UUID
-from typing_extensions import override

-from .base_resource import
+from .base_resource import AsyncAPIResource, SyncAPIResource, UseCaseResource

 if TYPE_CHECKING:
     from adaptive_sdk.client import Adaptive, AsyncAdaptive
@@ -32,9 +33,7 @@ def _prepare_add_interactions_inputs(
     messages: list[input_types.ChatMessage],
     feedbacks: list[input_types.InteractionFeedbackDict] | None,
 ):
-    input_messages = (
-        [rest_types.ChatMessage(role=m["role"], content=m["content"]) for m in messages]
-    )
+    input_messages = [rest_types.ChatMessage(role=m["role"], content=m["content"]) for m in messages]
     input_feedbacks = (
         [
             rest_types.InteractionFeedback(
@@ -123,20 +122,18 @@ class Interactions(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             group_by: Retrieve interactions grouped by selected dimension.

         """
-        new_filters = {} if filters is None else deepcopy(filters)
         order = [] if order is None else order
         new_page = {} if page is None else page

-
+        filter_list = _build_filter_list(filters)
         new_order = humps.camelize(order)
         new_page = humps.camelize(new_page)

-        if new_filters.get("timerange"):
-            new_filters["timerange"]["from"] = new_filters["timerange"]["from_"]  # type: ignore
-            del new_filters["timerange"]["from_"]  # type: ignore
-
-        new_filters.update({"useCase": self.use_case_key(use_case)})  # type: ignore
         order_inputs = [OrderPair.model_validate(o) for o in new_order] if new_order else UNSET
+        new_filters = {
+            "useCase": self.use_case_key(use_case),
+            "advancedFilters": {"and": [filter_list]},
+        }
         if group_by:
             return self._gql_client.list_grouped_interactions(
                 filter=ListCompletionsFilterInput.model_validate(new_filters),
@@ -234,20 +231,18 @@ class AsyncInteractions(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             group_by: Retrieve interactions grouped by selected dimension.

         """
-        new_filters = {} if filters is None else deepcopy(filters)
         order = [] if order is None else order
         new_page = {} if page is None else page

-
+        filter_list = _build_filter_list(filters)
         new_order = humps.camelize(order)
         new_page = humps.camelize(new_page)

-        if new_filters.get("timerange"):
-            new_filters["timerange"]["from"] = new_filters["timerange"]["from_"]  # type: ignore
-            del new_filters["timerange"]["from_"]  # type: ignore
-
-        new_filters.update({"useCase": self.use_case_key(use_case)})  # type: ignore
         order_inputs = [OrderPair.model_validate(o) for o in new_order] if new_order else UNSET
+        new_filters = {
+            "useCase": self.use_case_key(use_case),
+            "advancedFilters": {"and": [filter_list]},
+        }
         if group_by:
             result = await self._gql_client.list_grouped_interactions(
                 filter=ListCompletionsFilterInput.model_validate(new_filters),
@@ -277,3 +272,40 @@ class AsyncInteractions(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         """
         result = await self._gql_client.describe_interaction(use_case=self.use_case_key(use_case), id=completion_id)
         return result.completion
+
+
+def _build_filter_list(
+    filters: input_types.ListCompletionsFilterInput | None = None,
+) -> List[Any]:
+    filter_list: List[Any] = []
+    if filters is not None:
+        if "models" in filters:
+            filter_list.append({"model": {"in": filters["models"]}})
+        if "timerange" in filters:
+            filter_list.append(
+                {
+                    "timerange": {
+                        "from": filters["timerange"]["from_"],
+                        "to": filters["timerange"]["to"],
+                    }
+                }
+            )
+        if "session_id" in filters:
+            filter_list.append({"sessionId": filters["session_id"]})
+        if "completion_id" in filters:
+            filter_list.append({"completionId": filters["completion_id"]})
+        if "user_id" in filters:
+            filter_list.append({"userId": filters["user_id"]})
+        if "feedbacks" in filters:
+            filter_list.append({"feedbacks": filters["feedbacks"]})
+        if "comparisons" in filters:
+            filter_list.append({"comparisons": filters["comparisons"]})
+        if "labels" in filters:
+            for label_cond in filters["labels"]:
+                filter_list.append({"labels": label_cond})
+        if "prompt_hash" in filters:
+            filter_list.append({"promptHash": {"eq": filters["prompt_hash"]}})
+        if "source" in filters:
+            for source in filters["source"]:
+                filter_list.append({"source": source})
+    return filter_list
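Net effect of the interactions.py change: instead of copying and mutating the user-supplied filters dict in place, the client now translates it into a list of GraphQL "advancedFilters" clauses via _build_filter_list and wraps those together with the use case key. A rough standalone sketch of that translation for a few keys (illustrative filter values; not the SDK's own code path):

# Standalone sketch mirroring the new _build_filter_list mapping for a subset of keys.
# The real helper lives in adaptive_sdk/resources/interactions.py (0.1.14).
from typing import Any, Dict, List


def build_filter_clauses(filters: Dict[str, Any]) -> List[Any]:
    clauses: List[Any] = []
    if "models" in filters:
        clauses.append({"model": {"in": filters["models"]}})
    if "timerange" in filters:
        # note the from_ -> from key rename when crossing into GraphQL
        clauses.append({"timerange": {"from": filters["timerange"]["from_"],
                                      "to": filters["timerange"]["to"]}})
    if "user_id" in filters:
        clauses.append({"userId": filters["user_id"]})
    return clauses


clauses = build_filter_clauses({"models": ["example-model"], "user_id": "user-1"})
# The SDK then sends {"useCase": <use case key>, "advancedFilters": {"and": [clauses]}}.
print(clauses)  # [{'model': {'in': ['example-model']}}, {'userId': 'user-1'}]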
adaptive_sdk/resources/models.py
CHANGED
@@ -1,27 +1,31 @@
 from __future__ import annotations
-from typing import get_args, Callable, Literal, Sequence, TYPE_CHECKING

+from typing import TYPE_CHECKING, Callable, Literal, Sequence, get_args
+
+from adaptive_sdk import input_types
 from adaptive_sdk.graphql_client import (
-    OpenAIModel,
-    OpenAIProviderDataInput,
-    GoogleProviderDataInput,
-    ModelProviderDataInput,
     AddExternalModelInput,
+    AddHFModelInput,
     ExternalModelProviderName,
-
-    UpdateModelService,
-    ModelData,
+    GoogleProviderDataInput,
     JobData,
-    ModelServiceData,
     ListModelsModels,
-
+    ModelComputeConfigInput,
+    ModelData,
     ModelFilter,
     ModelPlacementInput,
-
+    ModelProviderDataInput,
+    ModelServiceData,
+    OpenAIProviderDataInput,
+    UpdateModelService,
+)
+from adaptive_sdk.graphql_client.input_types import (
+    AddModelToUseCaseInput,
+    DeployModelInput,
+    RemoveModelFromUseCaseInput,
 )
-from adaptive_sdk import input_types

-from .base_resource import
+from .base_resource import AsyncAPIResource, SyncAPIResource, UseCaseResource

 if TYPE_CHECKING:
     from adaptive_sdk.client import Adaptive, AsyncAdaptive
@@ -29,9 +33,7 @@ if TYPE_CHECKING:
 provider_config = {
     "open_ai": {
         "provider_data": lambda api_key, model_id: ModelProviderDataInput(
-            openAI=OpenAIProviderDataInput(
-                apiKey=api_key, externalModelId=OpenAIModel(model_id)
-            )
+            openAI=OpenAIProviderDataInput(apiKey=api_key, externalModelId=model_id)
         ),
     },
     "google": {
@@ -88,9 +90,7 @@ def is_supported_model(model_id: str):
     supported_models = get_args(SupportedHFModels)
     if model_id not in supported_models:
         supported_models_str = "\n".join(supported_models)
-        raise ValueError(
-            f"Model {model_id} is not supported.\n\nChoose from:\n{supported_models_str}"
-        )
+        raise ValueError(f"Model {model_id} is not supported.\n\nChoose from:\n{supported_models_str}")


 class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
@@ -155,22 +155,16 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 provider_data = provider_data_fn(api_key, external_model_id)
             case "azure":
                 if not endpoint:
-                    raise ValueError(
-                        "`endpoint` is required to connect Azure external model."
-                    )
+                    raise ValueError("`endpoint` is required to connect Azure external model.")
                 provider_data = provider_data_fn(api_key, external_model_id, endpoint)
             case _:
                 raise ValueError(f"Provider {provider} is not supported")

         provider_enum = ExternalModelProviderName(provider.upper())
-        input = AddExternalModelInput(
-            name=name, provider=provider_enum, providerData=provider_data
-        )
+        input = AddExternalModelInput(name=name, provider=provider_enum, providerData=provider_data)
         return self._gql_client.add_external_model(input).add_external_model

-    def list(
-        self, filter: input_types.ModelFilter | None = None
-    ) -> Sequence[ListModelsModels]:
+    def list(self, filter: input_types.ModelFilter | None = None) -> Sequence[ListModelsModels]:
         """
         List all models in Adaptive model registry.
         """
@@ -203,32 +197,79 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 If `True`, this call blocks until model is `Online`.
             make_default: Make the model the use case's default on attachment.
         """
-
+
+        input = AddModelToUseCaseInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+        )
+        self._gql_client.add_model_to_use_case(input)
+        input = DeployModelInput(
             model=model,
             useCase=self.use_case_key(use_case),
-
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
             wait=wait,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
         )
-        result = self._gql_client.
+        result: ModelServiceData = self._gql_client.deploy_model(input).deploy_model
         if make_default:
-            result = self.update(model=model, is_default=make_default)
+            result = self.update(model=model, is_default=make_default)
         return result

-    def
+    def add_to_use_case(
+        self,
+        model: str,
+        use_case: str | None = None,
+    ) -> bool:
+        """
+        Attach a model to the client's use case.
+
+        Args:
+            model: Model key.
+            wait: If the model is not deployed already, attaching it to the use case will automatically deploy it.
+                If `True`, this call blocks until model is `Online`.
+            make_default: Make the model the use case's default on attachment.
+        """
+
+        input = AddModelToUseCaseInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+        )
+        return self._gql_client.add_model_to_use_case(input).add_model_to_use_case
+
+    def deploy(
         self,
         model: str,
+        wait: bool = False,
+        make_default: bool = False,
         use_case: str | None = None,
+        placement: input_types.ModelPlacementInput | None = None,
     ) -> ModelServiceData:
+        input = DeployModelInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
+            wait=wait,
+        )
+        result: ModelServiceData = self._gql_client.deploy_model(input).deploy_model
+        if make_default:
+            result = self.update(model=model, is_default=make_default)
+        return result
+
+    def detach(
+        self,
+        model: str,
+        use_case: str,
+    ) -> bool:
         """
         Detach model from client's use case.

         Args:
             model: Model key.
         """
-
+        input = RemoveModelFromUseCaseInput(
+            model=model,
+            useCase=use_case,
+        )
+        return self._gql_client.remove_model_from_use_case(input).remove_model_from_use_case

     def update_compute_config(
         self,
@@ -247,7 +288,6 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         model: str,
         is_default: bool | None = None,
-        attached: bool | None = None,
         desired_online: bool | None = None,
         use_case: str | None = None,
         placement: input_types.ModelPlacementInput | None = None,
@@ -269,11 +309,8 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             useCase=self.use_case_key(use_case),
             modelService=model,
             isDefault=is_default,
-            attached=attached,
             desiredOnline=desired_online,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
         )
         return self._gql_client.update_model(input).update_model_service

@@ -286,9 +323,7 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             force: If model is attached to several use cases, `force` must equal `True` in order
                 for the model to be terminated.
         """
-        return self._gql_client.terminate_model(
-            id_or_key=model, force=force
-        ).terminate_model
+        return self._gql_client.terminate_model(id_or_key=model, force=force).terminate_model


 class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
@@ -353,23 +388,17 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 provider_data = provider_data_fn(api_key, external_model_id)
             case "azure":
                 if not endpoint:
-                    raise ValueError(
-                        "`endpoint` is required to connect Azure external model."
-                    )
+                    raise ValueError("`endpoint` is required to connect Azure external model.")
                 provider_data = provider_data_fn(api_key, external_model_id, endpoint)
             case _:
                 raise ValueError(f"Provider {provider} is not supported")

         provider_enum = ExternalModelProviderName(provider.upper())
-        input = AddExternalModelInput(
-            name=name, provider=provider_enum, providerData=provider_data
-        )
+        input = AddExternalModelInput(name=name, provider=provider_enum, providerData=provider_data)
         result = await self._gql_client.add_external_model(input)
         return result.add_external_model

-    async def list(
-        self, filter: input_types.ModelFilter | None = None
-    ) -> Sequence[ListModelsModels]:
+    async def list(self, filter: input_types.ModelFilter | None = None) -> Sequence[ListModelsModels]:
         """
         List all models in Adaptive model registry.
         """
@@ -385,38 +414,6 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         """
         return (await self._gql_client.describe_model(input=model)).model

-    async def attach(
-        self,
-        model: str,
-        wait: bool = True,
-        make_default: bool = False,
-        use_case: str | None = None,
-        placement: input_types.ModelPlacementInput | None = None,
-    ) -> ModelServiceData:
-        """
-        Attach a model to the client's use case.
-
-        Args:
-            model: Model key.
-            wait: If the model is not deployed already, attaching it to the use case will automatically deploy it.
-                If `True`, this call blocks until model is `Online`.
-            make_default: Make the model the use case's default on attachment.
-        """
-        input = AttachModel(
-            model=model,
-            useCase=self.use_case_key(use_case),
-            attached=True,
-            wait=wait,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
-        )
-        result = await self._gql_client.attach_model_to_use_case(input)
-        result = result.attach_model
-        if make_default:
-            result = await self.update(model=model, is_default=make_default)  # type: ignore[assignment]
-        return result
-
     async def detach(
         self,
         model: str,
@@ -428,7 +425,7 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         Args:
             model: Model key.
         """
-        return await self.update(model=model,
+        return await self.update(model=model, use_case=use_case)

     async def update_compute_config(
         self,
@@ -449,7 +446,6 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         model: str,
         is_default: bool | None = None,
-        attached: bool | None = None,
         desired_online: bool | None = None,
         use_case: str | None = None,
         placement: input_types.ModelPlacementInput | None = None,
@@ -471,11 +467,8 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             useCase=self.use_case_key(use_case),
             modelService=model,
             isDefault=is_default,
-            attached=attached,
             desiredOnline=desired_online,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
         )
         result = await self._gql_client.update_model(input)
         return result.update_model_service
@@ -489,6 +482,4 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             force: If model is attached to several use cases, `force` must equal `True` in order
                 for the model to be terminated.
         """
-        return (
-            await self._gql_client.terminate_model(id_or_key=model, force=force)
-        ).terminate_model
+        return (await self._gql_client.terminate_model(id_or_key=model, force=force)).terminate_model
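In short, 0.1.13's single attach() path (which set `attached=True` through the update mutation) is replaced in 0.1.14 by explicit add_to_use_case(), deploy(), and detach() calls backed by dedicated GraphQL mutations. A hedged sketch of the new sync flow; only the Models method signatures above come from this diff, while the `client.models` accessor is an assumption not shown here:

# Sketch only, assuming an already-constructed sync client and that the Models
# resource is exposed as `client.models` (the accessor is not shown in this diff).
from adaptive_sdk.client import Adaptive  # import path taken from the TYPE_CHECKING block above


def attach_and_deploy(client: Adaptive, model: str, use_case: str) -> None:
    # AddModelToUseCaseInput mutation, returns bool
    client.models.add_to_use_case(model=model, use_case=use_case)
    # DeployModelInput mutation, returns ModelServiceData; wait=True blocks until Online
    client.models.deploy(model=model, use_case=use_case, wait=True)


def remove(client: Adaptive, model: str, use_case: str) -> None:
    # RemoveModelFromUseCaseInput mutation, returns bool
    client.models.detach(model=model, use_case=use_case)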
adaptive_sdk/rest/rest_types.py
CHANGED
@@ -256,4 +256,5 @@ class ChatInput(GenerateParameters):
 system_prompt_args: Annotated[Optional[Dict[str, Any]], Field(description='Will be used to render system prompt template')] = None
 tags: Optional[List[str]] = None
 use_tools: Optional[bool] = None
-tools: Annotated[Optional[List[ToolOverride]], Field(description='Override tool configuration for this request - enables/disables specific tools')] = None
+tools: Annotated[Optional[List[ToolOverride]], Field(description='Override tool configuration for this request - enables/disables specific tools')] = None
+store: Optional[bool] = None
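The only functional change here is the new optional `store` flag on ChatInput. A minimal sketch of setting it, assuming ChatInput is a pydantic v2 model (consistent with the `model_validate`/`model_rebuild` calls elsewhere in this diff); other required fields inherited from GenerateParameters are omitted via `model_construct`, which skips validation:

# Sketch: toggling the new `store` field added to ChatInput in 0.1.14.
from adaptive_sdk.rest import rest_types

chat_input = rest_types.ChatInput.model_construct(store=True, use_tools=False)
print(chat_input.store)  # True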
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: adaptive-sdk
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.14
|
|
4
4
|
Summary: Python SDK for Adaptive Engine
|
|
5
5
|
Author-email: Vincent Debergue <vincent@adaptive-ml.com>, Joao Moura <joao@adaptive-ml.com>, Yacine Bouraoui <yacine@adaptive-ml.com>
|
|
6
6
|
Requires-Python: >=3.10
|
|
@@ -27,8 +27,11 @@ Requires-Dist: toml>=0.10.2 ; extra == "dev"
|
|
|
27
27
|
Requires-Dist: black==25.1.0 ; extra == "dev"
|
|
28
28
|
Requires-Dist: pytest==8.4.1 ; extra == "dev"
|
|
29
29
|
Requires-Dist: pytest-asyncio==0.26.0 ; extra == "dev"
|
|
30
|
+
Requires-Dist: pytest-dependency==0.6.0 ; extra == "dev"
|
|
30
31
|
Requires-Dist: mypy==1.16.0 ; extra == "dev"
|
|
31
32
|
Requires-Dist: tenacity==9.1.2 ; extra == "dev"
|
|
33
|
+
Requires-Dist: pyright >= 1.1 ; extra == "dev"
|
|
34
|
+
Requires-Dist: adaptive-harmony ; extra == "dev"
|
|
32
35
|
Provides-Extra: dev
|
|
33
36
|
|
|
34
37
|
# adaptive-sdk
|
|
{adaptive_sdk-0.1.13.dist-info → adaptive_sdk-0.1.14.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 adaptive_sdk/__init__.py,sha256=RQWUHUyEWFSqvTKQpM7Q98nRPnisXRgIZlYd65aJucs,308
 adaptive_sdk/base_client.py,sha256=kDeRwwsoue3MqKSXT-k-aUbUdAUb1Abow8oc_AULTnU,4935
-adaptive_sdk/client.py,sha256=
+adaptive_sdk/client.py,sha256=tTQ793NF_hjOf5yZiVVFLEm5f5RAQtbjcG-W8d0zifU,5663
 adaptive_sdk/error_handling.py,sha256=emicEqdEOCgWwCgMkQyvB7VWdKRMnfrWA2urEthQ_g0,3345
 adaptive_sdk/openapi.json,sha256=1zCNC1PsbRnfWy_2TF1MUgF4PDL3pbwYqJbzVTqmm-U,14607
 adaptive_sdk/patch.py,sha256=mF9iw2VnxirxA7_T7TdatpCTwk3EXfXml9elYAvmy7U,223
@@ -13,20 +13,20 @@ adaptive_sdk/external/reward_client.py,sha256=TSVdi437McUkMModDQV6ZswduaDmmG1Bvk
 adaptive_sdk/external/reward_server.py,sha256=yT8vqAEIoaq8nFZYaku5IoK0_7fX9uX_dfF1CxcDKnA,7488
 adaptive_sdk/external/reward_types.py,sha256=aZmTolT0OjgObo-64zJkarUKOY4RdxHnsQt8AKAaq1w,1710
 adaptive_sdk/external/websocket_utils.py,sha256=QN-K1IRbqe1LdQTz6vXhVgn-R2SjOB80NjGD2w_LaAo,1284
-adaptive_sdk/graphql_client/__init__.py,sha256=
+adaptive_sdk/graphql_client/__init__.py,sha256=2T0Zv8HkVhQ0EwawugXeoqWrvcwfbbAqSJ8Zy47hjJM,22094
 adaptive_sdk/graphql_client/add_external_model.py,sha256=9VpQHlJMm5t_Ja_SX2MkYcSG1IQpem6mrdXMfNTa1oo,352
 adaptive_sdk/graphql_client/add_hf_model.py,sha256=aC2IcYftepe28Hi01Kg0w3A7bjKrStWasx6aXiqghJU,312
 adaptive_sdk/graphql_client/add_model.py,sha256=Uyhpxkziz1Pv2S7Q0wOaKGK4krjmEf2PeLK0yEs89Hs,461
+adaptive_sdk/graphql_client/add_model_to_use_case.py,sha256=PQ6_hdCiiCRbbW9ylbfzjr5aAMgHXG16Fdtm_UjvJiE,182
 adaptive_sdk/graphql_client/add_remote_env.py,sha256=X94F94tnMzuX9TC_Ieowngw35gJxnKL2YST2sqEJ7CY,328
 adaptive_sdk/graphql_client/async_base_client_open_telemetry.py,sha256=XSRJGEcyfWnFjrDz4Un6xfQWOxr7jWto1vYjbYTQeo0,19761
-adaptive_sdk/graphql_client/async_client.py,sha256=
-adaptive_sdk/graphql_client/attach_model_to_use_case.py,sha256=WyERM4wxKrUS1u9VZ3FUasZu5AVSbRswzy9jmxssTFM,361
+adaptive_sdk/graphql_client/async_client.py,sha256=RhNbCIsBPl8te-7c2QfkY4VMCBpLP2tU1DLbViSSyh8,119538
 adaptive_sdk/graphql_client/base_client_open_telemetry.py,sha256=IV96gRr5FuH-dWMU5PBvQhTwEsV7udsXU-Dyh-Mx-4k,9398
 adaptive_sdk/graphql_client/base_model.py,sha256=2xxKA4sIdlGPIezw06XP9bseSxBURU7nivgt_gL44iA,621
 adaptive_sdk/graphql_client/base_operation.py,sha256=aooq1M4r79cvMoganZ2RvQ3-v0io22fGLOVfl3UBqPQ,4635
 adaptive_sdk/graphql_client/cancel_ab_campaign.py,sha256=JAEpmedz0jOu90U3KR0PYCMAhf2_E6h6WOT30HSE91k,176
 adaptive_sdk/graphql_client/cancel_job.py,sha256=3ZLoqrULi1mp5A5i4rD-gXliKhD8038IPfvCNBg0dPs,291
-adaptive_sdk/graphql_client/client.py,sha256
+adaptive_sdk/graphql_client/client.py,sha256=-eJ4ok5_N4CkNGVXtlntvGzXzGNAVQC2x65DqPgF90E,118581
 adaptive_sdk/graphql_client/create_ab_campaign.py,sha256=___2iFSGnWKkjXz_MfxKUxi1EoQcSBv9AX8S7YoUeVw,374
 adaptive_sdk/graphql_client/create_custom_recipe.py,sha256=eIVLDHbdFG2qWjoZBLC3Xs40Fjy6L-u6wrABV5ibUxo,382
 adaptive_sdk/graphql_client/create_dataset_from_multipart_upload.py,sha256=eoqOfEviCFww5ElTknknV0qOpFTJQmQ1BeGLsRQ5iHc,730
@@ -39,8 +39,8 @@ adaptive_sdk/graphql_client/create_role.py,sha256=6aTdNOZxavMyjkH-g01uYOZgpjYWcA
 adaptive_sdk/graphql_client/create_team.py,sha256=6Alt1ralE1-Xvp2wrEaLUHMW5RtiFqz2fIsUYE_2LbM,370
 adaptive_sdk/graphql_client/create_use_case.py,sha256=sekD76jWCo3zNCfMsBGhVYfNSIK4JPPBz9066BOt49g,332
 adaptive_sdk/graphql_client/create_user.py,sha256=gurD0kZgncXt1HBr7Oo5AkK5ubqFKpJvaR1rn506gHo,301
-adaptive_sdk/graphql_client/custom_fields.py,sha256=
-adaptive_sdk/graphql_client/custom_mutations.py,sha256=
+adaptive_sdk/graphql_client/custom_fields.py,sha256=lRdkGYIa2gsbAf4_akOSfqHOFgwpIx67GeIcDYnWHYY,100895
+adaptive_sdk/graphql_client/custom_mutations.py,sha256=cKtpF9qmuwj8QqSesUtW58tW5TmDqnLFpjOjRMxPb3U,26335
 adaptive_sdk/graphql_client/custom_queries.py,sha256=dVMj82YFJg9wjcnZdmnCbEKEaLiL8QUgpCjr5ONuQAU,18242
 adaptive_sdk/graphql_client/custom_typing_fields.py,sha256=yT_a4kZOMIccqIm-sAG0zu70c-plQtQrZ6ynmfxjlDU,20786
 adaptive_sdk/graphql_client/dataset_upload_processing_status.py,sha256=Xwj9bxmRf0RVxMT5kf30yX0vQaCYEuTI5-alCiqedqI,705
@@ -49,6 +49,7 @@ adaptive_sdk/graphql_client/delete_dataset.py,sha256=k5enUd6zO89RmA349eVzYajtZig
 adaptive_sdk/graphql_client/delete_grader.py,sha256=U9r26BtvOaThzyf0VGouvkuEaJ1wJGPGjbHluSDWBsc,350
 adaptive_sdk/graphql_client/delete_judge.py,sha256=rdiA12Q3Q0JEZTOBjTK3Tz33f2aurj5G9vqSoJUjw3I,342
 adaptive_sdk/graphql_client/delete_user.py,sha256=zMlvEaxGQAfDn-vMc0LWuhgzv1DJb69DuFwJcKGdF5o,301
+adaptive_sdk/graphql_client/deploy_model.py,sha256=UI6__BkSWUjQWusPRcukl-6te0dv2CFKt73QqfC3mz8,325
 adaptive_sdk/graphql_client/describe_ab_campaign.py,sha256=1-z2U5aPHx3EBk0JawW1MO0n1hUpuFNk_oEgzn_6hhQ,594
 adaptive_sdk/graphql_client/describe_dataset.py,sha256=3E4vhnIsTXAjBz51TpfNhw8ldItoePumGn5hw5B3EkM,300
 adaptive_sdk/graphql_client/describe_interaction.py,sha256=1gJ2b7ssrrcOujEKzzj7oH18h--V3DSk8HL84hlwuUc,331
@@ -58,13 +59,13 @@ adaptive_sdk/graphql_client/describe_metric_admin.py,sha256=_SKKwnFhZnbOTT97elEr
 adaptive_sdk/graphql_client/describe_model.py,sha256=UnsOnAyBjNsnkJaS4q5uwkSSvInHwRqUj3XqAoO0yO4,434
 adaptive_sdk/graphql_client/describe_model_admin.py,sha256=XUt_CBSMw1HRleUEWZn2snYt2BNSux_siqrVlwtqH-w,484
 adaptive_sdk/graphql_client/describe_use_case.py,sha256=WW0QkTmdfggN8YBUNGi8ShrP_fr2jXPR6Fer6jlQxu0,353
-adaptive_sdk/graphql_client/enums.py,sha256=
+adaptive_sdk/graphql_client/enums.py,sha256=tIHf3CU79ldu_UOq9hu0P1bLCvFmtHyafUnZvISXdFo,4902
 adaptive_sdk/graphql_client/exceptions.py,sha256=NiC6v-5S7aRDlvQTcHH3K5KvxWvk-c-PkIQQHkipTB8,2268
-adaptive_sdk/graphql_client/fragments.py,sha256=
+adaptive_sdk/graphql_client/fragments.py,sha256=mZgJhxudB3x4u7vs5RnCp3rycUhEuP09-A91N-xClbk,22515
 adaptive_sdk/graphql_client/get_custom_recipe.py,sha256=7qxBZGQTqpc69k-NwzgFctaHWaRz0tHl7YlVSsEad6U,383
 adaptive_sdk/graphql_client/get_grader.py,sha256=kubHDBtUcq6mZtUR5_Of0QbjnGUPSYuavF3_xwmwbY8,233
 adaptive_sdk/graphql_client/get_judge.py,sha256=urEnHW3XfURi5GAFBPfbqzOZGQDxgsGRA6nZmUKmoMA,224
-adaptive_sdk/graphql_client/input_types.py,sha256=
+adaptive_sdk/graphql_client/input_types.py,sha256=6bFD-TnDFWFzaEVKNpRVTZKr782RRmmPCnW8SG8Kg-E,25302
 adaptive_sdk/graphql_client/link_metric.py,sha256=EDH67ckBzzc6MYIGfsmgZRBnjqxLsCGwFUaFMXPEsBY,327
 adaptive_sdk/graphql_client/list_ab_campaigns.py,sha256=SIbU6I2OQkNHt0Gw6YStoiiwJHUk2rfXnpoGLzrFjxc,379
 adaptive_sdk/graphql_client/list_compute_pools.py,sha256=4Qli5Foxm3jztbUAL5gbwqtcrElwwlC4LGJMOMBI6Cc,782
@@ -88,6 +89,7 @@ adaptive_sdk/graphql_client/list_users.py,sha256=9LCNz49jqxrUapHyOdnzs-ZtU1xsejq
 adaptive_sdk/graphql_client/load_dataset.py,sha256=P_h3wPTT4E5mbgJoR5jX1_5GhDLIJsA6lmYPT27VxY8,323
 adaptive_sdk/graphql_client/lock_grader.py,sha256=cCISIKjFaSakf-tr4oEWebieT5yrpvGxdS_R_E4iww8,305
 adaptive_sdk/graphql_client/me.py,sha256=oR_m5QRalokWb9lZYSNgDzOBcNLuAgBIXZh7zfBerqw,227
+adaptive_sdk/graphql_client/remove_model_from_use_case.py,sha256=izNvNE0JDc54r9Nsqk5sWa1ez3rEywZ7XkUs822_h4k,197
 adaptive_sdk/graphql_client/remove_remote_env.py,sha256=lmEwudWc6bkU1ev2g9_QEOgkJT_n7n9P5wGm5AV2MY8,173
 adaptive_sdk/graphql_client/remove_team_member.py,sha256=H75WLcB4bM0yctfmmzztxSAzxvbOwWmkVZhO5DUuh1g,350
 adaptive_sdk/graphql_client/resize_inference_partition.py,sha256=osrAGQGYCFjAyfkftkxYW4Wv2VzMq_oW8i0x3_B8aKg,200
@@ -102,21 +104,22 @@ adaptive_sdk/graphql_client/update_model.py,sha256=w4-6gHg5k28k9SQwZ9qQZKbs3LQhn
 adaptive_sdk/graphql_client/update_model_compute_config.py,sha256=0RtrzzfuMrt-i6lABANRhem8jcDNkXORBEDFdSVpxfs,417
 adaptive_sdk/graphql_client/update_user.py,sha256=E6vtj177-Hx1uUUo8MiSrynULXrnxtJma3Kq69FPC6M,897
 adaptive_sdk/input_types/__init__.py,sha256=SVy4Ks4pm1M32LZ_yHN2itlzNW0g2Kg3b3_ibNLI-7s,806
-adaptive_sdk/input_types/typed_dicts.py,sha256=
+adaptive_sdk/input_types/typed_dicts.py,sha256=3g5raKSGUOcdc9DBAACj604kiyydNZC9YUeCS-K1t_Q,4911
 adaptive_sdk/output_types/__init__.py,sha256=gIfALHcp-hOt8J7bFO7KAYbJc0JXtp7d_vVBqD0zPLE,136
 adaptive_sdk/output_types/job_wrapper.py,sha256=dXdS33TKLHuTRAVq5LFvA7bsUidsWuhE-NDbq5k59g0,4247
-adaptive_sdk/resources/__init__.py,sha256
+adaptive_sdk/resources/__init__.py,sha256=rl49jl2fu5KSSye2LMSm9GdV-89iyKI-UaNX6PmahHY,1414
 adaptive_sdk/resources/abtests.py,sha256=9PCPjxuWwY9ec88ewnq54gkoELq5U5iaBmHhzLCAsFU,7698
+adaptive_sdk/resources/artifacts.py,sha256=2A9FZYkFb8nKQwgVVewQC6InOtSg86oRDk-W9v5-41M,1947
 adaptive_sdk/resources/base_resource.py,sha256=D9adWSFxiDB7chVstDuBu1jcuXkE71UQJexnWENpC4A,1497
-adaptive_sdk/resources/chat.py,sha256=
+adaptive_sdk/resources/chat.py,sha256=5Kwcsdd6tDSdp28Yv7OxsYyB4D4K4NS5iFHdfn2FbgA,12164
 adaptive_sdk/resources/compute_pools.py,sha256=_c_eDkXyqKm53ZcfL91EtcuitYqd7ds_3Uz4PUY-TLw,3441
 adaptive_sdk/resources/datasets.py,sha256=sgGP2BwhaezaGei8xXoH0aKHyZFc64ZvIllxFUKNTd8,13648
 adaptive_sdk/resources/embeddings.py,sha256=-ov_EChHU6PJJOJRtDlCo4sYyr9hwyvRjnBhub8QNFg,3922
 adaptive_sdk/resources/feedback.py,sha256=lujqwFIhxi6iovL8JWL05Kr-gkzR4QEwUXZbTx33raA,14116
 adaptive_sdk/resources/graders.py,sha256=ekQQ5fqmLZpZHeLr6iUm6m45wDevoDJdj3mG-axR-m8,29014
-adaptive_sdk/resources/interactions.py,sha256=
+adaptive_sdk/resources/interactions.py,sha256=MJEhNHjgyNFZWqW0b6VHeQMETw26GqqdMBGDEWLSdiY,11846
 adaptive_sdk/resources/jobs.py,sha256=3ueoHp_58ELIAWfRfiAp51Jt7hazMmbfngaep2wnPn8,5398
-adaptive_sdk/resources/models.py,sha256=
+adaptive_sdk/resources/models.py,sha256=tPjb3cNIB66unjctGOXTfmLaH97Xwb8NI5T8qowezyY,17567
 adaptive_sdk/resources/permissions.py,sha256=ckO-oacWkvgcwXBK5iW_8qaK-g0SHXpEEy1qZy5lrB0,1053
 adaptive_sdk/resources/recipes.py,sha256=RdxpI6YT2GoitA_UfLp-jvQSlmF1dMSg4slDgzgB-Pk,16115
 adaptive_sdk/resources/roles.py,sha256=fD1F5Gd3ddkATsU2aFj7japTJVZngVgqt6TXskBQEOA,2218
@@ -125,7 +128,7 @@ adaptive_sdk/resources/use_cases.py,sha256=a_Ls3kbYgByJMlOYM_UGi4MTiW4wU7m057bud
 adaptive_sdk/resources/users.py,sha256=SoGWwdDCdhK4KjYOcAws-ZWlW7Edii7D3Vxfdu-NZY4,4406
 adaptive_sdk/rest/__init__.py,sha256=P9uhkOoc9cgUkJ5MBoV5soPgQWSkvPrTwHzPGX7i5tY,610
 adaptive_sdk/rest/base_model.py,sha256=P06TNhnqXa6JEje_B_94vAa5zqPYIVxMZAp6aZ4d80U,516
-adaptive_sdk/rest/rest_types.py,sha256=
-adaptive_sdk-0.1.
-adaptive_sdk-0.1.
-adaptive_sdk-0.1.
+adaptive_sdk/rest/rest_types.py,sha256=D7uH0iLZP8rDwUblVQiFb04208Bf5oPbqFiaq1YAnD8,8892
+adaptive_sdk-0.1.14.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+adaptive_sdk-0.1.14.dist-info/METADATA,sha256=OE7kVUoFtTEn-EVtDKYNG27G5U-ECpJMaPDWK0TtRXU,1600
+adaptive_sdk-0.1.14.dist-info/RECORD,,
adaptive_sdk/graphql_client/attach_model_to_use_case.py
DELETED

@@ -1,12 +0,0 @@
-from pydantic import Field
-from .base_model import BaseModel
-from .fragments import ModelServiceData
-
-class AttachModelToUseCase(BaseModel):
-    """@public"""
-    attach_model: 'AttachModelToUseCaseAttachModel' = Field(alias='attachModel')
-
-class AttachModelToUseCaseAttachModel(ModelServiceData):
-    """@public"""
-    pass
-AttachModelToUseCase.model_rebuild()

{adaptive_sdk-0.1.13.dist-info → adaptive_sdk-0.1.14.dist-info}/WHEEL
File without changes