anyscale 0.26.20__py3-none-any.whl → 0.26.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anyscale/_private/anyscale_client/anyscale_client.py +103 -43
- anyscale/_private/anyscale_client/common.py +37 -7
- anyscale/_private/anyscale_client/fake_anyscale_client.py +98 -27
- anyscale/_private/models/model_base.py +95 -0
- anyscale/_private/workload/workload_sdk.py +3 -1
- anyscale/aggregated_instance_usage/models.py +4 -4
- anyscale/client/README.md +1 -9
- anyscale/client/openapi_client/__init__.py +0 -3
- anyscale/client/openapi_client/api/default_api.py +151 -715
- anyscale/client/openapi_client/models/__init__.py +0 -3
- anyscale/commands/cloud_commands.py +15 -4
- anyscale/commands/command_examples.py +4 -0
- anyscale/commands/list_util.py +107 -0
- anyscale/commands/service_commands.py +267 -31
- anyscale/commands/util.py +5 -4
- anyscale/controllers/cloud_controller.py +358 -49
- anyscale/controllers/service_controller.py +7 -86
- anyscale/service/__init__.py +53 -3
- anyscale/service/_private/service_sdk.py +177 -41
- anyscale/service/commands.py +78 -1
- anyscale/service/models.py +65 -0
- anyscale/util.py +35 -1
- anyscale/utils/gcp_utils.py +20 -4
- anyscale/version.py +1 -1
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/METADATA +1 -1
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/RECORD +31 -33
- anyscale/client/openapi_client/models/organization_public_identifier.py +0 -121
- anyscale/client/openapi_client/models/organization_response.py +0 -121
- anyscale/client/openapi_client/models/organizationpublicidentifier_response.py +0 -121
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/LICENSE +0 -0
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/NOTICE +0 -0
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/WHEEL +0 -0
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/entry_points.txt +0 -0
- {anyscale-0.26.20.dist-info → anyscale-0.26.22.dist-info}/top_level.txt +0 -0
anyscale/_private/anyscale_client/anyscale_client.py

@@ -60,6 +60,8 @@ from anyscale.client.openapi_client.models import (
     Dataset as InternalDataset,
     DatasetUpload,
     DecoratedComputeTemplate,
+    DecoratedlistserviceapimodelListResponse,
+    DecoratedProductionServiceV2APIModel,
     DecoratedSession,
     DeletedPlatformFineTunedModel,
     ExperimentalWorkspace,
@@ -85,7 +87,7 @@ from anyscale.cluster_compute import parse_cluster_compute_name_version
 from anyscale.feature_flags import FLAG_DEFAULT_WORKING_DIR_FOR_PROJ
 from anyscale.sdk.anyscale_client.api.default_api import DefaultApi as ExternalApi
 from anyscale.sdk.anyscale_client.models import (
-
+    ApplyProductionServiceV2Model,
     Cluster,
     ClusterCompute,
     ClusterComputeConfig,
@@ -102,7 +104,6 @@ from anyscale.sdk.anyscale_client.models import (
     ProductionServiceV2VersionModel,
     Project,
     RollbackServiceModel,
-    ServiceModel,
     TextQuery,
 )
 from anyscale.sdk.anyscale_client.models.jobs_query import JobsQuery
@@ -250,6 +251,7 @@ class AnyscaleClient(AnyscaleClientInterface):
         # Cached IDs and models to avoid duplicate lookups.
         self._default_project_id_from_cloud_id: Dict[Optional[str], str] = {}
         self._cloud_id_cache: Dict[Optional[str], str] = {}
+        self._cluster_env_build_cache: Dict[str, ClusterEnvironmentBuild] = {}
         self._current_workspace_cluster: Optional[Cluster] = None
         self._logger = logger or BlockLogger()
         self._host = host or ANYSCALE_HOST
@@ -715,16 +717,41 @@ class AnyscaleClient(AnyscaleClientInterface):
 
     @handle_api_exceptions
     def get_cluster_env_build(self, build_id: str) -> Optional[ClusterEnvironmentBuild]:
-
+        # Check cache first
+        if build_id in self._cluster_env_build_cache:
+            return self._cluster_env_build_cache[build_id]
+
+        # Fetch from API if not in cache
+        try:
+            res = self._external_api_client.get_cluster_environment_build(build_id)
+            build = res.result
+        except ExternalApiException as e:
+            if e.status == 404:
+                return None
+
+            raise e from None
+
+        # Store in cache ONLY if the build exists and is in a terminal state
+        if build:
+            terminal_states = {
+                ClusterEnvironmentBuildStatus.SUCCEEDED,
+                ClusterEnvironmentBuildStatus.FAILED,
+                ClusterEnvironmentBuildStatus.CANCELED,
+            }
+            if build.status in terminal_states:
+                self._cluster_env_build_cache[build_id] = build
+
+        return build
 
     @handle_api_exceptions
     def get_cluster_env_build_image_uri(
         self, cluster_env_build_id: str, use_image_alias: bool = False
     ) -> Optional[ImageURI]:
         try:
-            build…
-
-
+            build = self.get_cluster_env_build(cluster_env_build_id)
+            if build is None:
+                return None
+
             cluster_env = self._external_api_client.get_cluster_environment(
                 build.cluster_environment_id
             ).result
@@ -786,9 +813,7 @@ class AnyscaleClient(AnyscaleClientInterface):
         """
         elapsed_secs = 0
         while elapsed_secs < timeout_secs:
-            build = self.…
-                build_id…
-            ).result
+            build = self.get_cluster_env_build(build_id)
             if build.status == ClusterEnvironmentBuildStatus.SUCCEEDED:
                 self.logger.info("")
                 return
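Note: the new `_cluster_env_build_cache` only memoizes builds whose status can no longer change (SUCCEEDED, FAILED, CANCELED), so in-progress builds are always re-fetched. A minimal standalone sketch of the same idea, using illustrative names rather than the actual anyscale classes:

from dataclasses import dataclass
from enum import Enum
from typing import Callable, Dict, Optional


class BuildStatus(str, Enum):
    IN_PROGRESS = "IN_PROGRESS"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    CANCELED = "CANCELED"


TERMINAL_STATES = {BuildStatus.SUCCEEDED, BuildStatus.FAILED, BuildStatus.CANCELED}


@dataclass
class Build:
    id: str
    status: BuildStatus


class BuildCache:
    """Cache builds only once their status can no longer change."""

    def __init__(self, fetch_fn: Callable[[str], Optional[Build]]):
        self._fetch = fetch_fn          # stands in for the REST call
        self._cache: Dict[str, Build] = {}

    def get(self, build_id: str) -> Optional[Build]:
        if build_id in self._cache:     # cache hit: skip the API round trip
            return self._cache[build_id]
        build = self._fetch(build_id)   # may return None, mirroring the 404 path
        if build is not None and build.status in TERMINAL_STATES:
            self._cache[build_id] = build
        return build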
@@ -1001,35 +1026,60 @@ class AnyscaleClient(AnyscaleClientInterface):
         cloud: Optional[str],
         project: Optional[str],
         include_archived: bool = False,
-    ) -> Optional[…
-        #…
-
-
-        cloud_id = self.get_cloud_id(cloud_name=cloud)
-        project_id = self.get_project_id(parent_cloud_id=cloud_id, name=project)
-        archive_status = (
-            ArchiveStatus.ALL if include_archived else ArchiveStatus.NOT_ARCHIVED
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
+        # we don't have an api to get a service by name, so we need to list services and filter by name
+        resp = self.list_services(
+            name=name, cloud=cloud, project=project, include_archived=include_archived
         )
+        for result in resp.results:
+            if result.name == name:
+                return result
+        return None
 
-
-
-
-
-
-
-
-            archive_status=archive_status,
-        )
-        for result in resp.results:
-            if result.name == name:
-                service = result
-                break
+    @handle_api_exceptions
+    def get_service_by_id(
+        self, service_id: str
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
+        return self._internal_api_client.get_service_api_v2_services_v2_service_id_get(
+            service_id
+        ).result
 
-
-
-
+    @handle_api_exceptions
+    def list_services(
+        self,
+        *,
+        name: Optional[str] = None,
+        state_filter: Optional[List[str]] = None,
+        creator_id: Optional[str] = None,
+        cloud: Optional[str] = None,
+        project: Optional[str] = None,
+        include_archived: bool = False,
+        count: Optional[int] = None,
+        paging_token: Optional[str] = None,
+        sort_field: Optional[str] = None,
+        sort_order: Optional[str] = None,
+    ) -> DecoratedlistserviceapimodelListResponse:
+        cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
+        project_id = (
+            self.get_project_id(parent_cloud_id=cloud_id, name=project)
+            if project
+            else None
+        )
 
-        return…
+        return self._internal_api_client.list_services_api_v2_services_v2_get(
+            project_id=project_id,
+            cloud_id=cloud_id,
+            name=name,
+            state_filter=state_filter,
+            creator_id=creator_id,
+            archive_status=ArchiveStatus.ALL
+            if include_archived
+            else ArchiveStatus.NOT_ARCHIVED,
+            count=count if count else self.LIST_ENDPOINT_COUNT,
+            paging_token=paging_token,
+            sort_field=sort_field,
+            sort_order=sort_order,
+        )
 
     @handle_api_exceptions
     def get_project(self, project_id: str) -> Optional[Project]:
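Note: `list_services` wraps the paginated `/api/v2/services_v2` endpoint; `count` and `paging_token` come from its signature above, and `results` / `metadata.next_paging_token` are the response fields read elsewhere in this release. A hedged sketch of how a caller might walk every page (the `client` object and the helper name are illustrative, not part of the SDK):

from typing import Iterator, Optional


def iter_all_services(client, *, cloud: Optional[str] = None,
                      project: Optional[str] = None, page_size: int = 50) -> Iterator:
    """Yield every service visible to `client`, one page at a time."""
    token: Optional[str] = None
    while True:
        resp = client.list_services(
            cloud=cloud,
            project=project,
            count=page_size,
            paging_token=token,
        )
        yield from resp.results
        token = resp.metadata.next_paging_token
        if token is None:          # no further pages
            break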
@@ -1105,15 +1155,19 @@ class AnyscaleClient(AnyscaleClientInterface):
         return job_runs
 
     @handle_api_exceptions
-    def rollout_service(
-
+    def rollout_service(
+        self, model: ApplyProductionServiceV2Model
+    ) -> DecoratedProductionServiceV2APIModel:
+        result = self._internal_api_client.apply_service_api_v2_services_v2_apply_put(
+            model
+        ).result
         return result
 
     @handle_api_exceptions
     def rollback_service(
         self, service_id: str, *, max_surge_percent: Optional[int] = None
-    ) ->…
-        result…
+    ) -> DecoratedProductionServiceV2APIModel:
+        result = self._internal_api_client.rollback_service_api_v2_services_v2_service_id_rollback_post(
             service_id,
             rollback_service_model=RollbackServiceModel(
                 max_surge_percent=max_surge_percent
@@ -1122,17 +1176,23 @@ class AnyscaleClient(AnyscaleClientInterface):
         return result
 
     @handle_api_exceptions
-    def terminate_service(
-
+    def terminate_service(
+        self, service_id: str
+    ) -> DecoratedProductionServiceV2APIModel:
+        result = self._internal_api_client.terminate_service_api_v2_services_v2_service_id_terminate_post(
+            service_id
+        )
         return result
 
     @handle_api_exceptions
-    def archive_service(self, service_id: str) ->…
-        result…
+    def archive_service(self, service_id: str) -> DecoratedProductionServiceV2APIModel:
+        result = self._internal_api_client.archive_service_api_v2_services_v2_service_id_archive_post(
+            service_id
+        )
         return result
 
     @handle_api_exceptions
-    def delete_service(self, service_id: str):
+    def delete_service(self, service_id: str) -> None:
         self._internal_api_client.delete_service_api_v2_services_v2_service_id_delete(
             service_id
         )
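Note: the service lifecycle calls (`rollout_service`, `rollback_service`, `terminate_service`, `archive_service`) now return `DecoratedProductionServiceV2APIModel` rather than the removed `ServiceModel`. An illustrative helper, assuming `client` is an `AnyscaleClient` and relying only on the `id`, `name`, and `current_state` fields that the fake client populates below:

def terminate_if_running(client, service_id: str) -> None:
    """Illustrative helper; not part of the SDK."""
    service = client.get_service_by_id(service_id)
    if service is None:
        raise ValueError(f"no service with id {service_id!r}")
    print(f"{service.name} ({service.id}) is currently {service.current_state}")
    client.terminate_service(service_id)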
anyscale/_private/anyscale_client/common.py

@@ -15,6 +15,8 @@ from anyscale.client.openapi_client.models import (
     CreateResourceQuota,
     CreateUserProjectCollaborator,
     DecoratedComputeTemplate,
+    DecoratedlistserviceapimodelListResponse,
+    DecoratedProductionServiceV2APIModel,
     DeletedPlatformFineTunedModel,
     FineTunedModel,
     InternalProductionJob,
@@ -31,13 +33,12 @@ from anyscale.client.openapi_client.models.decorated_session import DecoratedSession
 from anyscale.client.openapi_client.models.production_job import ProductionJob
 from anyscale.client.openapi_client.models.session_ssh_key import SessionSshKey
 from anyscale.sdk.anyscale_client.models import (
-
+    ApplyProductionServiceV2Model,
     Cluster,
     ClusterCompute,
     ClusterEnvironment,
     Job as APIJobRun,
     ProductionServiceV2VersionModel,
-    ServiceModel,
 )
 from anyscale.sdk.anyscale_client.models.cluster_environment_build import (
     ClusterEnvironmentBuild,
@@ -303,13 +304,38 @@ class AnyscaleClientInterface(ABC):
         cloud: Optional[str],
         project: Optional[str],
         include_archived=False,
-    ) -> Optional[…
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
         """Get a service by name. Filter by cloud and project.
 
         Returns None if not found.
         """
         raise NotImplementedError
 
+    @abstractmethod
+    def get_service_by_id(
+        self, service_id: str
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
+        """Get a service by id."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def list_services(
+        self,
+        *,
+        name: Optional[str],
+        state_filter: Optional[List[str]],
+        creator_id: Optional[str],
+        cloud: Optional[str],
+        project: Optional[str],
+        include_archived: bool,
+        count: Optional[int],
+        paging_token: Optional[str],
+        sort_field: Optional[str],
+        sort_order: Optional[str],
+    ) -> DecoratedlistserviceapimodelListResponse:
+        """List services."""
+        raise NotImplementedError
+
     @abstractmethod
     def get_project(self, project_id: str) -> Optional[Project]:
         """Get a project by id.
@@ -349,7 +375,9 @@ class AnyscaleClientInterface(ABC):
         raise NotImplementedError
 
     @abstractmethod
-    def rollout_service(
+    def rollout_service(
+        self, model: ApplyProductionServiceV2Model
+    ) -> DecoratedProductionServiceV2APIModel:
         """Deploy or update the service to use the provided config.
 
         Returns the service ID.
@@ -367,17 +395,19 @@ class AnyscaleClientInterface(ABC):
         raise NotImplementedError
 
     @abstractmethod
-    def terminate_service(
+    def terminate_service(
+        self, service_id: str
+    ) -> DecoratedProductionServiceV2APIModel:
         """Mark the service to be terminated asynchronously."""
         raise NotImplementedError
 
     @abstractmethod
-    def archive_service(self, service_id: str):
+    def archive_service(self, service_id: str) -> DecoratedProductionServiceV2APIModel:
         """Mark the service to be archived asynchronously."""
         raise NotImplementedError
 
     @abstractmethod
-    def delete_service(self, service_id: str):
+    def delete_service(self, service_id: str) -> None:
         """Mark the service to be deleted asynchronously."""
         raise NotImplementedError
 
anyscale/_private/anyscale_client/fake_anyscale_client.py

@@ -26,6 +26,8 @@ from anyscale.client.openapi_client.models import (
     CreateResourceQuota,
     CreateUserProjectCollaborator,
     DecoratedComputeTemplate,
+    DecoratedlistserviceapimodelListResponse,
+    DecoratedProductionServiceV2APIModel,
     DeletedPlatformFineTunedModel,
     ExperimentalWorkspace,
     FineTunedModel,
@@ -33,6 +35,7 @@ from anyscale.client.openapi_client.models import (
     HaJobGoalStates,
     HaJobStates,
     InternalProductionJob,
+    ListResponseMetadata,
     MiniCloud,
     MiniUser,
     OrganizationCollaborator,
@@ -53,7 +56,7 @@ from anyscale.cluster_compute import parse_cluster_compute_name_version
 from anyscale.llm.dataset._private.models import Dataset
 from anyscale.sdk.anyscale_client.configuration import Configuration
 from anyscale.sdk.anyscale_client.models import (
-
+    ApplyProductionServiceV2Model,
     Cluster,
     ClusterCompute,
     ClusterComputeConfig,
@@ -63,7 +66,6 @@ from anyscale.sdk.anyscale_client.models import (
     Job as APIJobRun,
     ProductionServiceV2VersionModel,
     ServiceEventCurrentState,
-    ServiceModel,
     ServiceVersionState,
     SessionState,
 )
@@ -143,14 +145,14 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
         self._archived_compute_configs: Dict[str, ClusterCompute] = {}
         self._workspace_cluster: Optional[Cluster] = None
         self._workspace_dependency_tracking_enabled: bool = False
-        self._services: Dict[str,…
-        self._archived_services: Dict[str,…
-        self._deleted_services: Dict[str,…
+        self._services: Dict[str, DecoratedProductionServiceV2APIModel] = {}
+        self._archived_services: Dict[str, DecoratedProductionServiceV2APIModel] = {}
+        self._deleted_services: Dict[str, DecoratedProductionServiceV2APIModel] = {}
         self._jobs: Dict[str, ProductionJob] = {}
         self._job_runs: Dict[str, List[APIJobRun]] = defaultdict(list)
         self._project_to_id: Dict[Optional[str] : Dict[Optional[str], str]] = {}
         self._project_collaborators: Dict[str, List[CreateUserProjectCollaborator]] = {}
-        self._rolled_out_model: Optional[…
+        self._rolled_out_model: Optional[ApplyProductionServiceV2Model] = None
         self._sent_workspace_notifications: List[WorkspaceNotification] = []
         self._rolled_back_service: Optional[Tuple[str, Optional[int]]] = None
         self._terminated_service: Optional[str] = None
@@ -598,17 +600,17 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
         cluster_env = self._images[build.cluster_environment_id]
         return ImageURI.from_cluster_env_build(cluster_env, build)
 
-    def update_service(self, model:…
+    def update_service(self, model: DecoratedProductionServiceV2APIModel):
         self._services[model.id] = model
 
     def get_service(
         self,
         name: str,
         *,
-        cloud: Optional[str],
-        project: Optional[str],
-        include_archived=False,
-    ) -> Optional[…
+        cloud: Optional[str] = None,
+        project: Optional[str] = None,
+        include_archived: bool = False,
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
         cloud_id = self.get_cloud_id(cloud_name=cloud)
         cloud_project_dict = self._project_to_id.get(cloud_id, None)
         project_id = (
@@ -720,10 +722,12 @@
         return project_id
 
     @property
-    def rolled_out_model(self) -> Optional[…
+    def rolled_out_model(self) -> Optional[ApplyProductionServiceV2Model]:
         return self._rolled_out_model
 
-    def rollout_service(
+    def rollout_service(
+        self, model: ApplyProductionServiceV2Model
+    ) -> DecoratedProductionServiceV2APIModel:
         self._rolled_out_model = model
         # TODO(mowen): This feels convoluted, is there a better way to pull cloud name and project name from the model?
         project_model = self.get_project(model.project_id)
@@ -738,18 +742,17 @@
         else:
             service_id = f"service-id-{uuid.uuid4()!s}"
 
-        service =…
+        service = DecoratedProductionServiceV2APIModel(
             id=service_id,
             name=model.name,
-            current_state=ServiceEventCurrentState.RUNNING,
-            cloud_id=cloud_id,
             project_id=model.project_id,
-
-
-
-
+            cloud_id=self.get_cloud_id(compute_config_id=model.compute_config_id),
+            current_state=ServiceEventCurrentState.STARTING,
+            base_url=f"http://{model.name}.fake.url",
+            auth_token="fake-auth-token",
             primary_version=ProductionServiceV2VersionModel(
                 id=str(uuid.uuid4()),
+                created_at=datetime.now(),
                 version="primary",
                 current_state=ServiceVersionState.RUNNING,
                 weight=100,
@@ -761,10 +764,13 @@
             ),
             local_vars_configuration=OPENAPI_NO_VALIDATION,
         )
-
         self.update_service(service)
         return service
 
+    @property
+    def submitted_job(self) -> Optional[CreateInternalProductionJob]:
+        return self._submitted_job
+
     @property
     def rolled_back_service(self) -> Optional[Tuple[str, Optional[int]]]:
         return self._rolled_back_service
@@ -790,23 +796,19 @@
         ].primary_version.current_state = ServiceVersionState.TERMINATED
 
     @property
-    def archived_services(self) -> Dict[str,…
+    def archived_services(self) -> Dict[str, DecoratedProductionServiceV2APIModel]:
         return self._archived_services
 
     def archive_service(self, service_id: str):
         self._archived_services[service_id] = self._services.pop(service_id)
 
     @property
-    def deleted_services(self) -> Dict[str,…
+    def deleted_services(self) -> Dict[str, DecoratedProductionServiceV2APIModel]:
        return self._deleted_services
 
     def delete_service(self, service_id: str):
         self._deleted_services[service_id] = self._services.pop(service_id)
 
-    @property
-    def submitted_job(self) -> Optional[CreateInternalProductionJob]:
-        return self._submitted_job
-
     def submit_job(self, model: CreateInternalProductionJob) -> InternalProductionJob:
         self._submitted_job = model
 
@@ -1379,3 +1381,72 @@
             raise ValueError(f"Resource Quota with id '{resource_quota_id}' not found.")
 
         resource_quota.is_enabled = is_enabled
+
+    def get_service_by_id(
+        self, service_id: str
+    ) -> Optional[DecoratedProductionServiceV2APIModel]:
+        if service_id in self._services:
+            return self._services[service_id]
+        if service_id in self._archived_services:
+            return self._archived_services[service_id]
+        if service_id in self._deleted_services:
+            return self._deleted_services[service_id]
+        return None
+
+    def list_services(
+        self,
+        *,
+        name: Optional[str] = None,
+        state_filter: Optional[List[str]] = None,
+        creator_id: Optional[str] = None,  # noqa: ARG002
+        cloud: Optional[str] = None,
+        project: Optional[str] = None,
+        include_archived: bool = False,
+        count: Optional[int] = None,
+        paging_token: Optional[str] = None,  # noqa: ARG002
+        sort_field: Optional[str] = None,  # noqa: ARG002
+        sort_order: Optional[str] = None,  # noqa: ARG002
+    ) -> DecoratedlistserviceapimodelListResponse:
+        target_services = list(self._services.values())
+        if include_archived:
+            target_services.extend(list(self._archived_services.values()))
+
+        target_cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
+        target_project_id = (
+            self.get_project_id(parent_cloud_id=target_cloud_id, name=project)
+            if project
+            else None
+        )
+
+        filtered_results = []
+        for svc in target_services:
+            if name is not None and svc.name != name:
+                continue
+            # Ensure state comparison works whether svc.current_state is Enum or str
+            current_state_str = (
+                svc.current_state.value
+                if hasattr(svc.current_state, "value")
+                else svc.current_state
+            )
+            if state_filter is not None and current_state_str not in state_filter:
+                continue
+            if target_project_id is not None and svc.project_id != target_project_id:
+                continue
+            if target_cloud_id is not None and svc.cloud_id != target_cloud_id:
+                continue
+
+            filtered_results.append(svc)
+
+        if count is None:
+            count = len(filtered_results)
+        final_results = filtered_results[:count]
+
+        response = DecoratedlistserviceapimodelListResponse(
+            results=final_results,
+            metadata=ListResponseMetadata(
+                total=len(final_results),
+                next_paging_token=None,  # Fake doesn't support paging
+            ),
+            local_vars_configuration=OPENAPI_NO_VALIDATION,
+        )
+        return response
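Note: the fake's state filter has to handle `current_state` arriving either as an enum member or as a plain string, hence the `hasattr(..., "value")` check above. A standalone illustration of that normalization pattern (the enum here is invented for the example):

from enum import Enum


class ServiceState(str, Enum):
    RUNNING = "RUNNING"
    TERMINATED = "TERMINATED"


def state_as_str(state) -> str:
    # Enum members expose .value; plain strings pass through unchanged.
    return state.value if hasattr(state, "value") else state


assert state_as_str(ServiceState.RUNNING) == "RUNNING"
assert state_as_str("TERMINATED") == "TERMINATED"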
anyscale/_private/models/model_base.py

@@ -1,9 +1,13 @@
+import asyncio
+from collections import deque
 from dataclasses import asdict, fields
 from enum import Enum, EnumMeta
 import inspect
 from typing import (
     Any,
+    Awaitable,
     Callable,
+    Deque,
     Dict,
     Generic,
     Iterable,
@@ -252,3 +256,94 @@ class ListResponse(List[TModelBase]):
             return
         if index >= len(self) and self.has_more:
             self._fetch_next_page()
+
+
+RT = TypeVar("RT")
+
+
+class ResultIterator(Generic[RT]):
+    """
+    Lazily fetch and parse pages from a paged-list API that returns
+    Pydantic models with `.results` and `.metadata.next_paging_token`.
+    """
+
+    def __init__(
+        self,
+        *,
+        page_token: Optional[str],
+        max_items: Optional[int],
+        fetch_page: Callable[[Optional[str]], Any],
+        parse_fn: Optional[Callable[[Any], RT]] = None,
+        async_parse_fn: Optional[Callable[[Any], Awaitable[RT]]] = None,
+    ):
+        if parse_fn and async_parse_fn:
+            raise ValueError("Only one of parse_fn or async_parse_fn may be provided")
+
+        self._token = page_token
+        self._max = max_items
+        self._fetch = fetch_page
+        self._parse = parse_fn
+        self._aparse = async_parse_fn
+        self._buffer: Deque[RT] = deque()
+        self._count = 0
+        self._finished = False
+
+    def __iter__(self) -> Iterator[RT]:
+        while True:
+            # 1) Drain the buffer
+            while self._buffer:
+                if self._max is not None and self._count >= self._max:
+                    return
+                self._count += 1
+                yield self._buffer.popleft()
+
+            # 2) Done?
+            if self._finished or (self._max is not None and self._count >= self._max):
+                return
+
+            # 3) Fetch the next page (Pydantic model)
+            page = self._fetch(self._token)
+            raw_results = page.results
+            self._token = page.metadata.next_paging_token
+
+            # 4) No more data?
+            if not raw_results:
+                self._finished = True
+                return
+
+            # 5) Parse—sync or async
+            if self._aparse:
+                processed = asyncio.run(
+                    ResultIterator._process_items_async(raw_results, self._aparse)
+                )
+                self._buffer.extend(processed)
+
+            elif self._parse:
+                try:
+                    for raw in raw_results:
+                        self._buffer.append(self._parse(raw))
+                except Exception as e:  # noqa: BLE001
+                    raise RuntimeError(f"sync parse error: {e}") from e
+
+            else:
+                # No parser: assume items are already RT
+                self._buffer.extend(raw_results)  # type: ignore
+
+            # 6) If no next token, finish on next loop
+            if self._token is None:
+                self._finished = True
+
+    @staticmethod
+    async def _process_items_async(
+        items: List[Any], parser: Callable[[Any], Awaitable[RT]],
+    ) -> List[RT]:
+        if not items:
+            return []
+        tasks = [parser(item) for item in items]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+        processed: List[RT] = []
+        for idx, res in enumerate(results):
+            if isinstance(res, Exception):
+                raise RuntimeError(f"async parse failed on item {idx}: {res}") from res
+            processed.append(res)
+        return processed
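Note: a quick sketch of driving the new `ResultIterator` with a fake two-page fetcher; the import path follows the file above, and the page objects only need `.results` and `.metadata.next_paging_token`:

from types import SimpleNamespace

from anyscale._private.models.model_base import ResultIterator

PAGES = {
    None: SimpleNamespace(
        results=[1, 2, 3],
        metadata=SimpleNamespace(next_paging_token="page-2"),
    ),
    "page-2": SimpleNamespace(
        results=[4, 5],
        metadata=SimpleNamespace(next_paging_token=None),
    ),
}

items = ResultIterator(
    page_token=None,
    max_items=4,                    # stop early after four items
    fetch_page=lambda token: PAGES[token],
    parse_fn=lambda raw: raw * 10,  # optional per-item transform
)

print(list(items))  # [10, 20, 30, 40]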
anyscale/_private/workload/workload_sdk.py

@@ -328,4 +328,6 @@ class WorkloadSDK(BaseSDK):
         if not compute_config.anonymous:
             return compute_config_name
 
-        return self._compute_config_sdk.…
+        return self._compute_config_sdk._convert_api_model_to_compute_config_version(  # noqa: SLF001
+            compute_config
+        ).config