adaptive-sdk 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adaptive_sdk/client.py +2 -0
- adaptive_sdk/graphql_client/__init__.py +6 -5
- adaptive_sdk/graphql_client/add_model_to_use_case.py +6 -0
- adaptive_sdk/graphql_client/async_client.py +34 -26
- adaptive_sdk/graphql_client/client.py +34 -26
- adaptive_sdk/graphql_client/custom_fields.py +20 -4
- adaptive_sdk/graphql_client/custom_mutations.py +29 -14
- adaptive_sdk/graphql_client/deploy_model.py +7 -1
- adaptive_sdk/graphql_client/enums.py +3 -20
- adaptive_sdk/graphql_client/fragments.py +4 -4
- adaptive_sdk/graphql_client/input_types.py +157 -18
- adaptive_sdk/graphql_client/remove_model_from_use_case.py +6 -0
- adaptive_sdk/input_types/typed_dicts.py +14 -15
- adaptive_sdk/resources/__init__.py +3 -0
- adaptive_sdk/resources/artifacts.py +61 -0
- adaptive_sdk/resources/chat.py +11 -9
- adaptive_sdk/resources/interactions.py +57 -25
- adaptive_sdk/resources/models.py +86 -117
- adaptive_sdk/resources/recipes.py +4 -2
- adaptive_sdk/rest/rest_types.py +2 -1
- {adaptive_sdk-0.1.12.dist-info → adaptive_sdk-0.1.14.dist-info}/METADATA +4 -1
- {adaptive_sdk-0.1.12.dist-info → adaptive_sdk-0.1.14.dist-info}/RECORD +23 -21
- adaptive_sdk/graphql_client/attach_model_to_use_case.py +0 -12
- {adaptive_sdk-0.1.12.dist-info → adaptive_sdk-0.1.14.dist-info}/WHEEL +0 -0
adaptive_sdk/resources/interactions.py
CHANGED

@@ -1,26 +1,27 @@
 from __future__ import annotations
-
-import
-from typing import Dict, List, Literal, TYPE_CHECKING
+
+from typing import TYPE_CHECKING, Any, Dict, List, Literal
 from uuid import UUID
 
+import humps
+from typing_extensions import override
+
 from adaptive_sdk import input_types
 from adaptive_sdk.error_handling import rest_error_handler
 from adaptive_sdk.graphql_client import (
+    CompletionData,
     CompletionGroupBy,
     CursorPageInput,
     ListCompletionsFilterInput,
-    OrderPair,
-    ListInteractionsCompletions,
     ListGroupedInteractionsCompletionsGrouped,
-
+    ListInteractionsCompletions,
+    OrderPair,
 )
 from adaptive_sdk.graphql_client.base_model import UNSET
 from adaptive_sdk.rest import rest_types
 from adaptive_sdk.utils import convert_optional_UUID
-from typing_extensions import override
 
-from .base_resource import
+from .base_resource import AsyncAPIResource, SyncAPIResource, UseCaseResource
 
 if TYPE_CHECKING:
     from adaptive_sdk.client import Adaptive, AsyncAdaptive

@@ -32,9 +33,7 @@ def _prepare_add_interactions_inputs(
     messages: list[input_types.ChatMessage],
     feedbacks: list[input_types.InteractionFeedbackDict] | None,
 ):
-    input_messages = (
-        [rest_types.ChatMessage(role=m["role"], content=m["content"]) for m in messages]
-    )
+    input_messages = [rest_types.ChatMessage(role=m["role"], content=m["content"]) for m in messages]
     input_feedbacks = (
         [
             rest_types.InteractionFeedback(

@@ -123,20 +122,18 @@ class Interactions(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             group_by: Retrieve interactions grouped by selected dimension.
 
         """
-        new_filters = {} if filters is None else deepcopy(filters)
         order = [] if order is None else order
         new_page = {} if page is None else page
 
-
+        filter_list = _build_filter_list(filters)
         new_order = humps.camelize(order)
         new_page = humps.camelize(new_page)
 
-        if new_filters.get("timerange"):
-            new_filters["timerange"]["from"] = new_filters["timerange"]["from_"]  # type: ignore
-            del new_filters["timerange"]["from_"]  # type: ignore
-
-        new_filters.update({"useCase": self.use_case_key(use_case)})  # type: ignore
         order_inputs = [OrderPair.model_validate(o) for o in new_order] if new_order else UNSET
+        new_filters = {
+            "useCase": self.use_case_key(use_case),
+            "advancedFilters": {"and": [filter_list]},
+        }
         if group_by:
             return self._gql_client.list_grouped_interactions(
                 filter=ListCompletionsFilterInput.model_validate(new_filters),

@@ -234,20 +231,18 @@ class AsyncInteractions(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             group_by: Retrieve interactions grouped by selected dimension.
 
         """
-        new_filters = {} if filters is None else deepcopy(filters)
         order = [] if order is None else order
         new_page = {} if page is None else page
 
-
+        filter_list = _build_filter_list(filters)
         new_order = humps.camelize(order)
         new_page = humps.camelize(new_page)
 
-        if new_filters.get("timerange"):
-            new_filters["timerange"]["from"] = new_filters["timerange"]["from_"]  # type: ignore
-            del new_filters["timerange"]["from_"]  # type: ignore
-
-        new_filters.update({"useCase": self.use_case_key(use_case)})  # type: ignore
         order_inputs = [OrderPair.model_validate(o) for o in new_order] if new_order else UNSET
+        new_filters = {
+            "useCase": self.use_case_key(use_case),
+            "advancedFilters": {"and": [filter_list]},
+        }
         if group_by:
             result = await self._gql_client.list_grouped_interactions(
                 filter=ListCompletionsFilterInput.model_validate(new_filters),

@@ -277,3 +272,40 @@ class AsyncInteractions(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         """
         result = await self._gql_client.describe_interaction(use_case=self.use_case_key(use_case), id=completion_id)
         return result.completion
+
+
+def _build_filter_list(
+    filters: input_types.ListCompletionsFilterInput | None = None,
+) -> List[Any]:
+    filter_list: List[Any] = []
+    if filters is not None:
+        if "models" in filters:
+            filter_list.append({"model": {"in": filters["models"]}})
+        if "timerange" in filters:
+            filter_list.append(
+                {
+                    "timerange": {
+                        "from": filters["timerange"]["from_"],
+                        "to": filters["timerange"]["to"],
+                    }
+                }
+            )
+        if "session_id" in filters:
+            filter_list.append({"sessionId": filters["session_id"]})
+        if "completion_id" in filters:
+            filter_list.append({"completionId": filters["completion_id"]})
+        if "user_id" in filters:
+            filter_list.append({"userId": filters["user_id"]})
+        if "feedbacks" in filters:
+            filter_list.append({"feedbacks": filters["feedbacks"]})
+        if "comparisons" in filters:
+            filter_list.append({"comparisons": filters["comparisons"]})
+        if "labels" in filters:
+            for label_cond in filters["labels"]:
+                filter_list.append({"labels": label_cond})
+        if "prompt_hash" in filters:
+            filter_list.append({"promptHash": {"eq": filters["prompt_hash"]}})
+        if "source" in filters:
+            for source in filters["source"]:
+                filter_list.append({"source": source})
+    return filter_list
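The listing methods above now delegate filter translation to the new module-level `_build_filter_list` helper, which turns the user-facing filter keys into the `advancedFilters` clauses sent to GraphQL. A minimal sketch of that translation, using made-up filter values (only the helper and its key mapping come from this diff):

    from adaptive_sdk.resources.interactions import _build_filter_list

    # Made-up filter values; the keys follow input_types.ListCompletionsFilterInput
    # as read by the helper.
    filters = {
        "models": ["model-a", "model-b"],
        "timerange": {"from_": 1718000000, "to": 1718600000},
        "session_id": "session-123",
    }

    clauses = _build_filter_list(filters)
    # clauses == [
    #     {"model": {"in": ["model-a", "model-b"]}},
    #     {"timerange": {"from": 1718000000, "to": 1718600000}},
    #     {"sessionId": "session-123"},
    # ]
    # The callers then wrap this as
    # {"useCase": <use case key>, "advancedFilters": {"and": [clauses]}}
    # before validating it with ListCompletionsFilterInput.model_validate(...).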
adaptive_sdk/resources/models.py
CHANGED

@@ -1,27 +1,31 @@
 from __future__ import annotations
-from typing import get_args, Callable, Literal, Sequence, TYPE_CHECKING
 
+from typing import TYPE_CHECKING, Callable, Literal, Sequence, get_args
+
+from adaptive_sdk import input_types
 from adaptive_sdk.graphql_client import (
-    OpenAIModel,
-    OpenAIProviderDataInput,
-    GoogleProviderDataInput,
-    ModelProviderDataInput,
     AddExternalModelInput,
+    AddHFModelInput,
     ExternalModelProviderName,
-
-    UpdateModelService,
-    ModelData,
+    GoogleProviderDataInput,
     JobData,
-    ModelServiceData,
     ListModelsModels,
-
+    ModelComputeConfigInput,
+    ModelData,
     ModelFilter,
     ModelPlacementInput,
-
+    ModelProviderDataInput,
+    ModelServiceData,
+    OpenAIProviderDataInput,
+    UpdateModelService,
+)
+from adaptive_sdk.graphql_client.input_types import (
+    AddModelToUseCaseInput,
+    DeployModelInput,
+    RemoveModelFromUseCaseInput,
 )
-from adaptive_sdk import input_types
 
-from .base_resource import
+from .base_resource import AsyncAPIResource, SyncAPIResource, UseCaseResource
 
 if TYPE_CHECKING:
     from adaptive_sdk.client import Adaptive, AsyncAdaptive

@@ -29,9 +33,7 @@ if TYPE_CHECKING:
 provider_config = {
     "open_ai": {
         "provider_data": lambda api_key, model_id: ModelProviderDataInput(
-            openAI=OpenAIProviderDataInput(
-                apiKey=api_key, externalModelId=OpenAIModel(model_id)
-            )
+            openAI=OpenAIProviderDataInput(apiKey=api_key, externalModelId=model_id)
         ),
     },
     "google": {

@@ -88,9 +90,7 @@ def is_supported_model(model_id: str):
     supported_models = get_args(SupportedHFModels)
     if model_id not in supported_models:
         supported_models_str = "\n".join(supported_models)
-        raise ValueError(
-            f"Model {model_id} is not supported.\n\nChoose from:\n{supported_models_str}"
-        )
+        raise ValueError(f"Model {model_id} is not supported.\n\nChoose from:\n{supported_models_str}")
 
 
 class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]

@@ -155,22 +155,16 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 provider_data = provider_data_fn(api_key, external_model_id)
             case "azure":
                 if not endpoint:
-                    raise ValueError(
-                        "`endpoint` is required to connect Azure external model."
-                    )
+                    raise ValueError("`endpoint` is required to connect Azure external model.")
                 provider_data = provider_data_fn(api_key, external_model_id, endpoint)
             case _:
                 raise ValueError(f"Provider {provider} is not supported")
 
         provider_enum = ExternalModelProviderName(provider.upper())
-        input = AddExternalModelInput(
-            name=name, provider=provider_enum, providerData=provider_data
-        )
+        input = AddExternalModelInput(name=name, provider=provider_enum, providerData=provider_data)
         return self._gql_client.add_external_model(input).add_external_model
 
-    def list(
-        self, filter: input_types.ModelFilter | None = None
-    ) -> Sequence[ListModelsModels]:
+    def list(self, filter: input_types.ModelFilter | None = None) -> Sequence[ListModelsModels]:
         """
         List all models in Adaptive model registry.
         """

@@ -203,32 +197,79 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 If `True`, this call blocks until model is `Online`.
             make_default: Make the model the use case's default on attachment.
         """
-
+
+        input = AddModelToUseCaseInput(
             model=model,
             useCase=self.use_case_key(use_case),
-
+        )
+        self._gql_client.add_model_to_use_case(input)
+        input = DeployModelInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
             wait=wait,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
         )
-        result = self._gql_client.
+        result: ModelServiceData = self._gql_client.deploy_model(input).deploy_model
         if make_default:
-            result = self.update(model=model, is_default=make_default)
+            result = self.update(model=model, is_default=make_default)
         return result
 
-    def
+    def add_to_use_case(
+        self,
+        model: str,
+        use_case: str | None = None,
+    ) -> bool:
+        """
+        Attach a model to the client's use case.
+
+        Args:
+            model: Model key.
+            wait: If the model is not deployed already, attaching it to the use case will automatically deploy it.
+                If `True`, this call blocks until model is `Online`.
+            make_default: Make the model the use case's default on attachment.
+        """
+
+        input = AddModelToUseCaseInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+        )
+        return self._gql_client.add_model_to_use_case(input).add_model_to_use_case
+
+    def deploy(
         self,
         model: str,
+        wait: bool = False,
+        make_default: bool = False,
         use_case: str | None = None,
+        placement: input_types.ModelPlacementInput | None = None,
     ) -> ModelServiceData:
+        input = DeployModelInput(
+            model=model,
+            useCase=self.use_case_key(use_case),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
+            wait=wait,
+        )
+        result: ModelServiceData = self._gql_client.deploy_model(input).deploy_model
+        if make_default:
+            result = self.update(model=model, is_default=make_default)
+        return result
+
+    def detach(
+        self,
+        model: str,
+        use_case: str,
+    ) -> bool:
         """
         Detach model from client's use case.
 
         Args:
            model: Model key.
         """
-
+        input = RemoveModelFromUseCaseInput(
+            model=model,
+            useCase=use_case,
+        )
+        return self._gql_client.remove_model_from_use_case(input).remove_model_from_use_case
 
     def update_compute_config(
         self,

@@ -247,7 +288,6 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         model: str,
         is_default: bool | None = None,
-        attached: bool | None = None,
         desired_online: bool | None = None,
         use_case: str | None = None,
         placement: input_types.ModelPlacementInput | None = None,

@@ -269,24 +309,11 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             useCase=self.use_case_key(use_case),
             modelService=model,
             isDefault=is_default,
-            attached=attached,
             desiredOnline=desired_online,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
         )
         return self._gql_client.update_model(input).update_model_service
 
-    def deploy(self, model: str, wait: bool = False) -> str:
-        """
-        Deploy a model, loading it to memory and making it ready for inference.
-
-        Args:
-            model: Model key.
-            wait: If `True`, call block until model is in `Online` state.
-        """
-        return self._gql_client.deploy_model(id_or_key=model, wait=wait).deploy_model
-
     def terminate(self, model: str, force: bool = False) -> str:
         """
         Terminate model, removing it from memory and making it unavailable to all use cases.

@@ -296,9 +323,7 @@ class Models(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
             force: If model is attached to several use cases, `force` must equal `True` in order
                 for the model to be terminated.
         """
-        return self._gql_client.terminate_model(
-            id_or_key=model, force=force
-        ).terminate_model
+        return self._gql_client.terminate_model(id_or_key=model, force=force).terminate_model
 
 
 class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]

@@ -363,23 +388,17 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
                 provider_data = provider_data_fn(api_key, external_model_id)
             case "azure":
                 if not endpoint:
-                    raise ValueError(
-                        "`endpoint` is required to connect Azure external model."
-                    )
+                    raise ValueError("`endpoint` is required to connect Azure external model.")
                 provider_data = provider_data_fn(api_key, external_model_id, endpoint)
             case _:
                 raise ValueError(f"Provider {provider} is not supported")
 
         provider_enum = ExternalModelProviderName(provider.upper())
-        input = AddExternalModelInput(
-            name=name, provider=provider_enum, providerData=provider_data
-        )
+        input = AddExternalModelInput(name=name, provider=provider_enum, providerData=provider_data)
         result = await self._gql_client.add_external_model(input)
         return result.add_external_model
 
-    async def list(
-        self, filter: input_types.ModelFilter | None = None
-    ) -> Sequence[ListModelsModels]:
+    async def list(self, filter: input_types.ModelFilter | None = None) -> Sequence[ListModelsModels]:
         """
         List all models in Adaptive model registry.
         """

@@ -395,38 +414,6 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         """
         return (await self._gql_client.describe_model(input=model)).model
 
-    async def attach(
-        self,
-        model: str,
-        wait: bool = True,
-        make_default: bool = False,
-        use_case: str | None = None,
-        placement: input_types.ModelPlacementInput | None = None,
-    ) -> ModelServiceData:
-        """
-        Attach a model to the client's use case.
-
-        Args:
-            model: Model key.
-            wait: If the model is not deployed already, attaching it to the use case will automatically deploy it.
-                If `True`, this call blocks until model is `Online`.
-            make_default: Make the model the use case's default on attachment.
-        """
-        input = AttachModel(
-            model=model,
-            useCase=self.use_case_key(use_case),
-            attached=True,
-            wait=wait,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
-        )
-        result = await self._gql_client.attach_model_to_use_case(input)
-        result = result.attach_model
-        if make_default:
-            result = await self.update(model=model, is_default=make_default)  # type: ignore[assignment]
-        return result
-
     async def detach(
         self,
         model: str,

@@ -438,7 +425,7 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         Args:
             model: Model key.
         """
-        return await self.update(model=model,
+        return await self.update(model=model, use_case=use_case)
 
     async def update_compute_config(
         self,

@@ -459,7 +446,6 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         model: str,
         is_default: bool | None = None,
-        attached: bool | None = None,
         desired_online: bool | None = None,
         use_case: str | None = None,
         placement: input_types.ModelPlacementInput | None = None,

@@ -481,27 +467,12 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             useCase=self.use_case_key(use_case),
             modelService=model,
             isDefault=is_default,
-            attached=attached,
             desiredOnline=desired_online,
-            placement=(
-                ModelPlacementInput.model_validate(placement) if placement else None
-            ),
+            placement=(ModelPlacementInput.model_validate(placement) if placement else None),
         )
         result = await self._gql_client.update_model(input)
         return result.update_model_service
 
-    async def deploy(self, model: str, wait: bool = False) -> str:
-        """
-        Deploy a model, loading it to memory and making it ready for inference.
-
-        Args:
-            model: Model key.
-            wait: If `True`, call block until model is in `Online` state.
-        """
-        return (
-            await self._gql_client.deploy_model(id_or_key=model, wait=wait)
-        ).deploy_model
-
     async def terminate(self, model: str, force: bool = False) -> str:
         """
         Terminate model, removing it from memory and making it unavailable to all use cases.

@@ -511,6 +482,4 @@ class AsyncModels(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
             force: If model is attached to several use cases, `force` must equal `True` in order
                 for the model to be terminated.
         """
-        return (
-            await self._gql_client.terminate_model(id_or_key=model, force=force)
-        ).terminate_model
+        return (await self._gql_client.terminate_model(id_or_key=model, force=force)).terminate_model
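Taken together, the models.py hunks replace the old attach/deploy-by-key flow with explicit `add_to_use_case`, `deploy`, and `detach` methods built on `AddModelToUseCaseInput`, `DeployModelInput`, and `RemoveModelFromUseCaseInput`. A sketch of the new sync flow; the client construction and the `client.models` attribute name are assumptions, only the method signatures come from this diff:

    from adaptive_sdk.client import Adaptive

    client = Adaptive(...)  # constructor arguments omitted; not shown in this diff

    # Attach the model to a use case; returns a bool per the new signature.
    client.models.add_to_use_case(model="my-model", use_case="my-use-case")

    # Deploy it. wait=True blocks until the model is Online; make_default chains a self.update(...).
    service = client.models.deploy(
        model="my-model",
        wait=True,
        make_default=True,
        use_case="my-use-case",
    )

    # Later, remove it from the use case again.
    client.models.detach(model="my-model", use_case="my-use-case")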
adaptive_sdk/resources/recipes.py
CHANGED

@@ -99,6 +99,7 @@ class Recipes(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         recipe_key: str,
         path: str | None = None,
+        entrypoint: str | None = None,
         name: str | None = None,
         description: str | None = None,
         labels: Sequence[tuple[str, str]] | None = None,

@@ -112,7 +113,7 @@ class Recipes(SyncAPIResource, UseCaseResource):  # type: ignore[misc]
         )
 
         if path:
-            with _upload_from_path(path) as file_upload:
+            with _upload_from_path(path, entrypoint=entrypoint) as file_upload:
                 return self._gql_client.update_custom_recipe(
                     use_case=self.use_case_key(use_case),
                     id=recipe_key,

@@ -239,6 +240,7 @@ class AsyncRecipes(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         self,
         recipe_key: str,
         path: str | None = None,
+        entrypoint: str | None = None,
         name: str | None = None,
         description: str | None = None,
         labels: Sequence[tuple[str, str]] | None = None,

@@ -252,7 +254,7 @@ class AsyncRecipes(AsyncAPIResource, UseCaseResource):  # type: ignore[misc]
         )
 
         if path:
-            with _upload_from_path(path) as file_upload:
+            with _upload_from_path(path, entrypoint=entrypoint) as file_upload:
                 return (
                     await self._gql_client.update_custom_recipe(
                         use_case=self.use_case_key(use_case),
adaptive_sdk/rest/rest_types.py
CHANGED

@@ -256,4 +256,5 @@ class ChatInput(GenerateParameters):
     system_prompt_args: Annotated[Optional[Dict[str, Any]], Field(description='Will be used to render system prompt template')] = None
     tags: Optional[List[str]] = None
     use_tools: Optional[bool] = None
-    tools: Annotated[Optional[List[ToolOverride]], Field(description='Override tool configuration for this request - enables/disables specific tools')] = None
+    tools: Annotated[Optional[List[ToolOverride]], Field(description='Override tool configuration for this request - enables/disables specific tools')] = None
+    store: Optional[bool] = None
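`ChatInput` gains an optional `store` flag alongside the existing `tools` override. A minimal sketch of a request body carrying the new field; the surrounding fields and values are illustrative, and the exact server-side semantics of `store` are not stated in this diff:

    # Illustrative chat request payload; only the `store` key is new in 0.1.14.
    chat_request = {
        "tags": ["demo"],
        "use_tools": False,
        "store": True,
    }
    # ChatInput (a pydantic model extending GenerateParameters) validates this shape
    # together with the other generation parameters a real request needs.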
{adaptive_sdk-0.1.12.dist-info → adaptive_sdk-0.1.14.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: adaptive-sdk
-Version: 0.1.12
+Version: 0.1.14
 Summary: Python SDK for Adaptive Engine
 Author-email: Vincent Debergue <vincent@adaptive-ml.com>, Joao Moura <joao@adaptive-ml.com>, Yacine Bouraoui <yacine@adaptive-ml.com>
 Requires-Python: >=3.10

@@ -27,8 +27,11 @@ Requires-Dist: toml>=0.10.2 ; extra == "dev"
 Requires-Dist: black==25.1.0 ; extra == "dev"
 Requires-Dist: pytest==8.4.1 ; extra == "dev"
 Requires-Dist: pytest-asyncio==0.26.0 ; extra == "dev"
+Requires-Dist: pytest-dependency==0.6.0 ; extra == "dev"
 Requires-Dist: mypy==1.16.0 ; extra == "dev"
 Requires-Dist: tenacity==9.1.2 ; extra == "dev"
+Requires-Dist: pyright >= 1.1 ; extra == "dev"
+Requires-Dist: adaptive-harmony ; extra == "dev"
 Provides-Extra: dev
 
 # adaptive-sdk
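With these additions, the dev extra now also pulls in pytest-dependency, pyright, and adaptive-harmony. Installing it for this release would look roughly like:

    pip install "adaptive-sdk[dev]==0.1.14"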
{adaptive_sdk-0.1.12.dist-info → adaptive_sdk-0.1.14.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 adaptive_sdk/__init__.py,sha256=RQWUHUyEWFSqvTKQpM7Q98nRPnisXRgIZlYd65aJucs,308
 adaptive_sdk/base_client.py,sha256=kDeRwwsoue3MqKSXT-k-aUbUdAUb1Abow8oc_AULTnU,4935
-adaptive_sdk/client.py,sha256=
+adaptive_sdk/client.py,sha256=tTQ793NF_hjOf5yZiVVFLEm5f5RAQtbjcG-W8d0zifU,5663
 adaptive_sdk/error_handling.py,sha256=emicEqdEOCgWwCgMkQyvB7VWdKRMnfrWA2urEthQ_g0,3345
 adaptive_sdk/openapi.json,sha256=1zCNC1PsbRnfWy_2TF1MUgF4PDL3pbwYqJbzVTqmm-U,14607
 adaptive_sdk/patch.py,sha256=mF9iw2VnxirxA7_T7TdatpCTwk3EXfXml9elYAvmy7U,223

@@ -13,20 +13,20 @@ adaptive_sdk/external/reward_client.py,sha256=TSVdi437McUkMModDQV6ZswduaDmmG1Bvk
 adaptive_sdk/external/reward_server.py,sha256=yT8vqAEIoaq8nFZYaku5IoK0_7fX9uX_dfF1CxcDKnA,7488
 adaptive_sdk/external/reward_types.py,sha256=aZmTolT0OjgObo-64zJkarUKOY4RdxHnsQt8AKAaq1w,1710
 adaptive_sdk/external/websocket_utils.py,sha256=QN-K1IRbqe1LdQTz6vXhVgn-R2SjOB80NjGD2w_LaAo,1284
-adaptive_sdk/graphql_client/__init__.py,sha256=
+adaptive_sdk/graphql_client/__init__.py,sha256=2T0Zv8HkVhQ0EwawugXeoqWrvcwfbbAqSJ8Zy47hjJM,22094
 adaptive_sdk/graphql_client/add_external_model.py,sha256=9VpQHlJMm5t_Ja_SX2MkYcSG1IQpem6mrdXMfNTa1oo,352
 adaptive_sdk/graphql_client/add_hf_model.py,sha256=aC2IcYftepe28Hi01Kg0w3A7bjKrStWasx6aXiqghJU,312
 adaptive_sdk/graphql_client/add_model.py,sha256=Uyhpxkziz1Pv2S7Q0wOaKGK4krjmEf2PeLK0yEs89Hs,461
+adaptive_sdk/graphql_client/add_model_to_use_case.py,sha256=PQ6_hdCiiCRbbW9ylbfzjr5aAMgHXG16Fdtm_UjvJiE,182
 adaptive_sdk/graphql_client/add_remote_env.py,sha256=X94F94tnMzuX9TC_Ieowngw35gJxnKL2YST2sqEJ7CY,328
 adaptive_sdk/graphql_client/async_base_client_open_telemetry.py,sha256=XSRJGEcyfWnFjrDz4Un6xfQWOxr7jWto1vYjbYTQeo0,19761
-adaptive_sdk/graphql_client/async_client.py,sha256=
-adaptive_sdk/graphql_client/attach_model_to_use_case.py,sha256=WyERM4wxKrUS1u9VZ3FUasZu5AVSbRswzy9jmxssTFM,361
+adaptive_sdk/graphql_client/async_client.py,sha256=RhNbCIsBPl8te-7c2QfkY4VMCBpLP2tU1DLbViSSyh8,119538
 adaptive_sdk/graphql_client/base_client_open_telemetry.py,sha256=IV96gRr5FuH-dWMU5PBvQhTwEsV7udsXU-Dyh-Mx-4k,9398
 adaptive_sdk/graphql_client/base_model.py,sha256=2xxKA4sIdlGPIezw06XP9bseSxBURU7nivgt_gL44iA,621
 adaptive_sdk/graphql_client/base_operation.py,sha256=aooq1M4r79cvMoganZ2RvQ3-v0io22fGLOVfl3UBqPQ,4635
 adaptive_sdk/graphql_client/cancel_ab_campaign.py,sha256=JAEpmedz0jOu90U3KR0PYCMAhf2_E6h6WOT30HSE91k,176
 adaptive_sdk/graphql_client/cancel_job.py,sha256=3ZLoqrULi1mp5A5i4rD-gXliKhD8038IPfvCNBg0dPs,291
-adaptive_sdk/graphql_client/client.py,sha256
+adaptive_sdk/graphql_client/client.py,sha256=-eJ4ok5_N4CkNGVXtlntvGzXzGNAVQC2x65DqPgF90E,118581
 adaptive_sdk/graphql_client/create_ab_campaign.py,sha256=___2iFSGnWKkjXz_MfxKUxi1EoQcSBv9AX8S7YoUeVw,374
 adaptive_sdk/graphql_client/create_custom_recipe.py,sha256=eIVLDHbdFG2qWjoZBLC3Xs40Fjy6L-u6wrABV5ibUxo,382
 adaptive_sdk/graphql_client/create_dataset_from_multipart_upload.py,sha256=eoqOfEviCFww5ElTknknV0qOpFTJQmQ1BeGLsRQ5iHc,730

@@ -39,8 +39,8 @@ adaptive_sdk/graphql_client/create_role.py,sha256=6aTdNOZxavMyjkH-g01uYOZgpjYWcA
 adaptive_sdk/graphql_client/create_team.py,sha256=6Alt1ralE1-Xvp2wrEaLUHMW5RtiFqz2fIsUYE_2LbM,370
 adaptive_sdk/graphql_client/create_use_case.py,sha256=sekD76jWCo3zNCfMsBGhVYfNSIK4JPPBz9066BOt49g,332
 adaptive_sdk/graphql_client/create_user.py,sha256=gurD0kZgncXt1HBr7Oo5AkK5ubqFKpJvaR1rn506gHo,301
-adaptive_sdk/graphql_client/custom_fields.py,sha256=
-adaptive_sdk/graphql_client/custom_mutations.py,sha256=
+adaptive_sdk/graphql_client/custom_fields.py,sha256=lRdkGYIa2gsbAf4_akOSfqHOFgwpIx67GeIcDYnWHYY,100895
+adaptive_sdk/graphql_client/custom_mutations.py,sha256=cKtpF9qmuwj8QqSesUtW58tW5TmDqnLFpjOjRMxPb3U,26335
 adaptive_sdk/graphql_client/custom_queries.py,sha256=dVMj82YFJg9wjcnZdmnCbEKEaLiL8QUgpCjr5ONuQAU,18242
 adaptive_sdk/graphql_client/custom_typing_fields.py,sha256=yT_a4kZOMIccqIm-sAG0zu70c-plQtQrZ6ynmfxjlDU,20786
 adaptive_sdk/graphql_client/dataset_upload_processing_status.py,sha256=Xwj9bxmRf0RVxMT5kf30yX0vQaCYEuTI5-alCiqedqI,705

@@ -49,7 +49,7 @@ adaptive_sdk/graphql_client/delete_dataset.py,sha256=k5enUd6zO89RmA349eVzYajtZig
 adaptive_sdk/graphql_client/delete_grader.py,sha256=U9r26BtvOaThzyf0VGouvkuEaJ1wJGPGjbHluSDWBsc,350
 adaptive_sdk/graphql_client/delete_judge.py,sha256=rdiA12Q3Q0JEZTOBjTK3Tz33f2aurj5G9vqSoJUjw3I,342
 adaptive_sdk/graphql_client/delete_user.py,sha256=zMlvEaxGQAfDn-vMc0LWuhgzv1DJb69DuFwJcKGdF5o,301
-adaptive_sdk/graphql_client/deploy_model.py,sha256=
+adaptive_sdk/graphql_client/deploy_model.py,sha256=UI6__BkSWUjQWusPRcukl-6te0dv2CFKt73QqfC3mz8,325
 adaptive_sdk/graphql_client/describe_ab_campaign.py,sha256=1-z2U5aPHx3EBk0JawW1MO0n1hUpuFNk_oEgzn_6hhQ,594
 adaptive_sdk/graphql_client/describe_dataset.py,sha256=3E4vhnIsTXAjBz51TpfNhw8ldItoePumGn5hw5B3EkM,300
 adaptive_sdk/graphql_client/describe_interaction.py,sha256=1gJ2b7ssrrcOujEKzzj7oH18h--V3DSk8HL84hlwuUc,331

@@ -59,13 +59,13 @@ adaptive_sdk/graphql_client/describe_metric_admin.py,sha256=_SKKwnFhZnbOTT97elEr
 adaptive_sdk/graphql_client/describe_model.py,sha256=UnsOnAyBjNsnkJaS4q5uwkSSvInHwRqUj3XqAoO0yO4,434
 adaptive_sdk/graphql_client/describe_model_admin.py,sha256=XUt_CBSMw1HRleUEWZn2snYt2BNSux_siqrVlwtqH-w,484
 adaptive_sdk/graphql_client/describe_use_case.py,sha256=WW0QkTmdfggN8YBUNGi8ShrP_fr2jXPR6Fer6jlQxu0,353
-adaptive_sdk/graphql_client/enums.py,sha256=
+adaptive_sdk/graphql_client/enums.py,sha256=tIHf3CU79ldu_UOq9hu0P1bLCvFmtHyafUnZvISXdFo,4902
 adaptive_sdk/graphql_client/exceptions.py,sha256=NiC6v-5S7aRDlvQTcHH3K5KvxWvk-c-PkIQQHkipTB8,2268
-adaptive_sdk/graphql_client/fragments.py,sha256=
+adaptive_sdk/graphql_client/fragments.py,sha256=mZgJhxudB3x4u7vs5RnCp3rycUhEuP09-A91N-xClbk,22515
 adaptive_sdk/graphql_client/get_custom_recipe.py,sha256=7qxBZGQTqpc69k-NwzgFctaHWaRz0tHl7YlVSsEad6U,383
 adaptive_sdk/graphql_client/get_grader.py,sha256=kubHDBtUcq6mZtUR5_Of0QbjnGUPSYuavF3_xwmwbY8,233
 adaptive_sdk/graphql_client/get_judge.py,sha256=urEnHW3XfURi5GAFBPfbqzOZGQDxgsGRA6nZmUKmoMA,224
-adaptive_sdk/graphql_client/input_types.py,sha256=
+adaptive_sdk/graphql_client/input_types.py,sha256=6bFD-TnDFWFzaEVKNpRVTZKr782RRmmPCnW8SG8Kg-E,25302
 adaptive_sdk/graphql_client/link_metric.py,sha256=EDH67ckBzzc6MYIGfsmgZRBnjqxLsCGwFUaFMXPEsBY,327
 adaptive_sdk/graphql_client/list_ab_campaigns.py,sha256=SIbU6I2OQkNHt0Gw6YStoiiwJHUk2rfXnpoGLzrFjxc,379
 adaptive_sdk/graphql_client/list_compute_pools.py,sha256=4Qli5Foxm3jztbUAL5gbwqtcrElwwlC4LGJMOMBI6Cc,782

@@ -89,6 +89,7 @@ adaptive_sdk/graphql_client/list_users.py,sha256=9LCNz49jqxrUapHyOdnzs-ZtU1xsejq
 adaptive_sdk/graphql_client/load_dataset.py,sha256=P_h3wPTT4E5mbgJoR5jX1_5GhDLIJsA6lmYPT27VxY8,323
 adaptive_sdk/graphql_client/lock_grader.py,sha256=cCISIKjFaSakf-tr4oEWebieT5yrpvGxdS_R_E4iww8,305
 adaptive_sdk/graphql_client/me.py,sha256=oR_m5QRalokWb9lZYSNgDzOBcNLuAgBIXZh7zfBerqw,227
+adaptive_sdk/graphql_client/remove_model_from_use_case.py,sha256=izNvNE0JDc54r9Nsqk5sWa1ez3rEywZ7XkUs822_h4k,197
 adaptive_sdk/graphql_client/remove_remote_env.py,sha256=lmEwudWc6bkU1ev2g9_QEOgkJT_n7n9P5wGm5AV2MY8,173
 adaptive_sdk/graphql_client/remove_team_member.py,sha256=H75WLcB4bM0yctfmmzztxSAzxvbOwWmkVZhO5DUuh1g,350
 adaptive_sdk/graphql_client/resize_inference_partition.py,sha256=osrAGQGYCFjAyfkftkxYW4Wv2VzMq_oW8i0x3_B8aKg,200

@@ -103,30 +104,31 @@ adaptive_sdk/graphql_client/update_model.py,sha256=w4-6gHg5k28k9SQwZ9qQZKbs3LQhn
 adaptive_sdk/graphql_client/update_model_compute_config.py,sha256=0RtrzzfuMrt-i6lABANRhem8jcDNkXORBEDFdSVpxfs,417
 adaptive_sdk/graphql_client/update_user.py,sha256=E6vtj177-Hx1uUUo8MiSrynULXrnxtJma3Kq69FPC6M,897
 adaptive_sdk/input_types/__init__.py,sha256=SVy4Ks4pm1M32LZ_yHN2itlzNW0g2Kg3b3_ibNLI-7s,806
-adaptive_sdk/input_types/typed_dicts.py,sha256=
+adaptive_sdk/input_types/typed_dicts.py,sha256=3g5raKSGUOcdc9DBAACj604kiyydNZC9YUeCS-K1t_Q,4911
 adaptive_sdk/output_types/__init__.py,sha256=gIfALHcp-hOt8J7bFO7KAYbJc0JXtp7d_vVBqD0zPLE,136
 adaptive_sdk/output_types/job_wrapper.py,sha256=dXdS33TKLHuTRAVq5LFvA7bsUidsWuhE-NDbq5k59g0,4247
-adaptive_sdk/resources/__init__.py,sha256
+adaptive_sdk/resources/__init__.py,sha256=rl49jl2fu5KSSye2LMSm9GdV-89iyKI-UaNX6PmahHY,1414
 adaptive_sdk/resources/abtests.py,sha256=9PCPjxuWwY9ec88ewnq54gkoELq5U5iaBmHhzLCAsFU,7698
+adaptive_sdk/resources/artifacts.py,sha256=2A9FZYkFb8nKQwgVVewQC6InOtSg86oRDk-W9v5-41M,1947
 adaptive_sdk/resources/base_resource.py,sha256=D9adWSFxiDB7chVstDuBu1jcuXkE71UQJexnWENpC4A,1497
-adaptive_sdk/resources/chat.py,sha256=
+adaptive_sdk/resources/chat.py,sha256=5Kwcsdd6tDSdp28Yv7OxsYyB4D4K4NS5iFHdfn2FbgA,12164
 adaptive_sdk/resources/compute_pools.py,sha256=_c_eDkXyqKm53ZcfL91EtcuitYqd7ds_3Uz4PUY-TLw,3441
 adaptive_sdk/resources/datasets.py,sha256=sgGP2BwhaezaGei8xXoH0aKHyZFc64ZvIllxFUKNTd8,13648
 adaptive_sdk/resources/embeddings.py,sha256=-ov_EChHU6PJJOJRtDlCo4sYyr9hwyvRjnBhub8QNFg,3922
 adaptive_sdk/resources/feedback.py,sha256=lujqwFIhxi6iovL8JWL05Kr-gkzR4QEwUXZbTx33raA,14116
 adaptive_sdk/resources/graders.py,sha256=ekQQ5fqmLZpZHeLr6iUm6m45wDevoDJdj3mG-axR-m8,29014
-adaptive_sdk/resources/interactions.py,sha256=
+adaptive_sdk/resources/interactions.py,sha256=MJEhNHjgyNFZWqW0b6VHeQMETw26GqqdMBGDEWLSdiY,11846
 adaptive_sdk/resources/jobs.py,sha256=3ueoHp_58ELIAWfRfiAp51Jt7hazMmbfngaep2wnPn8,5398
-adaptive_sdk/resources/models.py,sha256=
+adaptive_sdk/resources/models.py,sha256=tPjb3cNIB66unjctGOXTfmLaH97Xwb8NI5T8qowezyY,17567
 adaptive_sdk/resources/permissions.py,sha256=ckO-oacWkvgcwXBK5iW_8qaK-g0SHXpEEy1qZy5lrB0,1053
-adaptive_sdk/resources/recipes.py,sha256=
+adaptive_sdk/resources/recipes.py,sha256=RdxpI6YT2GoitA_UfLp-jvQSlmF1dMSg4slDgzgB-Pk,16115
 adaptive_sdk/resources/roles.py,sha256=fD1F5Gd3ddkATsU2aFj7japTJVZngVgqt6TXskBQEOA,2218
 adaptive_sdk/resources/teams.py,sha256=KItuOfqKKyFSRFoEAF7rAXb8nbIqRkwRxjEbACjeEoY,1476
 adaptive_sdk/resources/use_cases.py,sha256=a_Ls3kbYgByJMlOYM_UGi4MTiW4wU7m057budrgfEE0,9014
 adaptive_sdk/resources/users.py,sha256=SoGWwdDCdhK4KjYOcAws-ZWlW7Edii7D3Vxfdu-NZY4,4406
 adaptive_sdk/rest/__init__.py,sha256=P9uhkOoc9cgUkJ5MBoV5soPgQWSkvPrTwHzPGX7i5tY,610
 adaptive_sdk/rest/base_model.py,sha256=P06TNhnqXa6JEje_B_94vAa5zqPYIVxMZAp6aZ4d80U,516
-adaptive_sdk/rest/rest_types.py,sha256=
-adaptive_sdk-0.1.
-adaptive_sdk-0.1.
-adaptive_sdk-0.1.
+adaptive_sdk/rest/rest_types.py,sha256=D7uH0iLZP8rDwUblVQiFb04208Bf5oPbqFiaq1YAnD8,8892
+adaptive_sdk-0.1.14.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+adaptive_sdk-0.1.14.dist-info/METADATA,sha256=OE7kVUoFtTEn-EVtDKYNG27G5U-ECpJMaPDWK0TtRXU,1600
+adaptive_sdk-0.1.14.dist-info/RECORD,,