anyscale 0.26.50__py3-none-any.whl → 0.26.52__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. anyscale/_private/anyscale_client/README.md +1 -1
  2. anyscale/_private/anyscale_client/anyscale_client.py +178 -46
  3. anyscale/_private/anyscale_client/common.py +61 -2
  4. anyscale/_private/anyscale_client/fake_anyscale_client.py +145 -8
  5. anyscale/_private/docgen/__main__.py +34 -23
  6. anyscale/_private/docgen/generator.py +15 -18
  7. anyscale/_private/docgen/models.md +4 -2
  8. anyscale/_private/workload/workload_sdk.py +103 -8
  9. anyscale/client/README.md +5 -0
  10. anyscale/client/openapi_client/__init__.py +1 -0
  11. anyscale/client/openapi_client/api/default_api.py +538 -0
  12. anyscale/client/openapi_client/models/__init__.py +1 -0
  13. anyscale/client/openapi_client/models/baseimagesenum.py +83 -1
  14. anyscale/client/openapi_client/models/cloud_resource.py +59 -3
  15. anyscale/client/openapi_client/models/cloud_resource_gcp.py +59 -3
  16. anyscale/client/openapi_client/models/clouddeployment_response.py +121 -0
  17. anyscale/client/openapi_client/models/create_cloud_resource.py +59 -3
  18. anyscale/client/openapi_client/models/create_cloud_resource_gcp.py +59 -3
  19. anyscale/client/openapi_client/models/object_storage.py +2 -2
  20. anyscale/client/openapi_client/models/ray_runtime_env_config.py +57 -1
  21. anyscale/client/openapi_client/models/supportedbaseimagesenum.py +80 -1
  22. anyscale/cloud/models.py +1 -1
  23. anyscale/commands/cloud_commands.py +73 -70
  24. anyscale/commands/command_examples.py +28 -40
  25. anyscale/commands/project_commands.py +377 -106
  26. anyscale/commands/workspace_commands_v2.py +62 -29
  27. anyscale/controllers/cloud_controller.py +91 -91
  28. anyscale/job/_private/job_sdk.py +38 -20
  29. anyscale/project/__init__.py +101 -1
  30. anyscale/project/_private/project_sdk.py +90 -2
  31. anyscale/project/commands.py +188 -1
  32. anyscale/project/models.py +198 -2
  33. anyscale/sdk/anyscale_client/models/baseimagesenum.py +83 -1
  34. anyscale/sdk/anyscale_client/models/ray_runtime_env_config.py +57 -1
  35. anyscale/sdk/anyscale_client/models/supportedbaseimagesenum.py +80 -1
  36. anyscale/service/_private/service_sdk.py +2 -1
  37. anyscale/shared_anyscale_utils/latest_ray_version.py +1 -1
  38. anyscale/util.py +3 -0
  39. anyscale/utils/cloud_utils.py +20 -0
  40. anyscale/utils/runtime_env.py +3 -1
  41. anyscale/version.py +1 -1
  42. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/METADATA +1 -1
  43. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/RECORD +48 -47
  44. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/WHEEL +0 -0
  45. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/entry_points.txt +0 -0
  46. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/licenses/LICENSE +0 -0
  47. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/licenses/NOTICE +0 -0
  48. {anyscale-0.26.50.dist-info → anyscale-0.26.52.dist-info}/top_level.txt +0 -0
@@ -87,7 +87,7 @@ Here's an example job configuration for testing:
87
87
  ```yaml
88
88
  name: generate-doggos-embeddings
89
89
  compute_config: doggos-azure:1
90
- working_dir: azure://cloud-dev-blob/org_7c1Kalm9WcX2bNIjW53GUT/cld_wgmfc248s6t7513awyubirlwu9/runtime_env_packages/pkg_b60e2d10615fb9845a9bad7d9307547a.zip
90
+ working_dir: abfss://cloud-dev-blob@anyscaleclouddev.dfs.core.windows.net/org_7c1Kalm9WcX2bNIjW53GUT/cld_wgmfc248s6t7513awyubirlwu9/runtime_env_packages/pkg_b60e2d10615fb9845a9bad7d9307547a.zip
91
91
  requirements:
92
92
  - matplotlib==3.10.0
93
93
  - torch==2.7.1
@@ -67,6 +67,9 @@ from anyscale.client.openapi_client.models import (
67
67
  ListResourceQuotasQuery,
68
68
  OrganizationCollaborator,
69
69
  OrganizationInvitation,
70
+ Project,
71
+ ProjectBase,
72
+ ProjectListResponse,
70
73
  ResourceQuota,
71
74
  ResourceQuotaStatus,
72
75
  ServerSessionToken,
@@ -76,6 +79,7 @@ from anyscale.client.openapi_client.models import (
76
79
  StopSessionOptions,
77
80
  SystemWorkloadName,
78
81
  WorkspaceDataplaneProxiedArtifacts,
82
+ WriteProject,
79
83
  )
80
84
  from anyscale.client.openapi_client.models.create_schedule import CreateSchedule
81
85
  from anyscale.client.openapi_client.models.decorated_job_queue import DecoratedJobQueue
@@ -104,7 +108,7 @@ from anyscale.sdk.anyscale_client.models import (
104
108
  CreateClusterEnvironmentBuild,
105
109
  Job as APIJobRun,
106
110
  ProductionServiceV2VersionModel,
107
- Project,
111
+ Project as ProjectExternal,
108
112
  RollbackServiceModel,
109
113
  TextQuery,
110
114
  )
@@ -158,7 +162,7 @@ def handle_api_exceptions(func):
158
162
  body_dict = json.loads(e.body)
159
163
  msg = body_dict["error"]["detail"]
160
164
  raise ValueError(msg) from None
161
- except KeyError:
165
+ except (KeyError, TypeError):
162
166
  # ApiException doesn't conform to expected format, raise original error
163
167
  raise e from None
164
168
  raise e from None
@@ -484,7 +488,7 @@ class AnyscaleClient(AnyscaleClientInterface):
484
488
 
485
489
  if self._default_project_id_from_cloud_id.get(parent_cloud_id) is None:
486
490
  # Cloud isolation organizations follow the permissions model in https://docs.anyscale.com/organization-and-user-account/access-controls
487
- default_project: Project = self._external_api_client.get_default_project(
491
+ default_project: ProjectExternal = self._external_api_client.get_default_project(
488
492
  parent_cloud_id=parent_cloud_id
489
493
  ).result
490
494
  self._default_project_id_from_cloud_id[parent_cloud_id] = default_project.id
@@ -1050,11 +1054,53 @@ class AnyscaleClient(AnyscaleClientInterface):
1050
1054
  )
1051
1055
 
1052
1056
  @handle_api_exceptions
1053
- def get_project(self, project_id: str) -> Optional[Project]:
1057
+ def get_project(self, project_id: str) -> Project:
1054
1058
  return self._internal_api_client.get_project_api_v2_projects_project_id_get(
1055
1059
  project_id
1056
1060
  ).result
1057
1061
 
1062
+ @handle_api_exceptions
1063
+ def list_projects(
1064
+ self,
1065
+ *,
1066
+ name_contains: Optional[str] = None,
1067
+ creator_id: Optional[str] = None,
1068
+ parent_cloud_id: Optional[str] = None,
1069
+ include_defaults: bool = True,
1070
+ sort_field: Optional[str] = None,
1071
+ sort_order: Optional[str] = None,
1072
+ paging_token: Optional[str] = None,
1073
+ count: Optional[int] = None,
1074
+ ) -> ProjectListResponse:
1075
+ return self._internal_api_client.list_projects_api_v2_projects_get(
1076
+ name_contains=name_contains,
1077
+ creator_id=creator_id,
1078
+ parent_cloud_id=parent_cloud_id,
1079
+ include_defaults=include_defaults,
1080
+ sort_field=sort_field,
1081
+ sort_order=sort_order,
1082
+ paging_token=paging_token,
1083
+ count=count or self.LIST_ENDPOINT_COUNT,
1084
+ )
1085
+
1086
+ @handle_api_exceptions
1087
+ def create_project(self, project: WriteProject) -> ProjectBase:
1088
+ return self._internal_api_client.create_project_api_v2_projects_post(
1089
+ project
1090
+ ).result
1091
+
1092
+ @handle_api_exceptions
1093
+ def delete_project(self, project_id: str) -> None:
1094
+ self._internal_api_client.delete_project_api_v2_projects_project_id_delete(
1095
+ project_id
1096
+ )
1097
+
1098
+ @handle_api_exceptions
1099
+ def get_default_project(self, parent_cloud_id: str) -> Project:
1100
+ return self._internal_api_client.get_default_project_api_v2_projects_default_project_get(
1101
+ parent_cloud_id=parent_cloud_id,
1102
+ ).result
1103
+
1058
1104
  @handle_api_exceptions
1059
1105
  def add_project_collaborators(
1060
1106
  self, project_id: str, collaborators: List[CreateUserProjectCollaborator]
@@ -1243,6 +1289,64 @@ class AnyscaleClient(AnyscaleClientInterface):
1243
1289
  job_id
1244
1290
  )
1245
1291
 
1292
+ def _upload_local_runtime_env(
1293
+ self,
1294
+ cloud_id: str,
1295
+ cloud_deployment_id: Optional[str],
1296
+ zip_file_bytes: bytes,
1297
+ content_hash: str,
1298
+ overwrite_existing_file: bool,
1299
+ ) -> CloudDataBucketPresignedUrlResponse:
1300
+ file_name = RUNTIME_ENV_PACKAGE_FORMAT.format(content_hash=content_hash)
1301
+ request = CloudDataBucketPresignedUrlRequest(
1302
+ file_type=CloudDataBucketFileType.RUNTIME_ENV_PACKAGES,
1303
+ file_name=file_name,
1304
+ access_mode=CloudDataBucketAccessMode.WRITE,
1305
+ cloud_deployment_id=cloud_deployment_id,
1306
+ )
1307
+ info: CloudDataBucketPresignedUrlResponse = self._internal_api_client.generate_cloud_data_bucket_presigned_url_api_v2_clouds_cloud_id_generate_cloud_data_bucket_presigned_url_post(
1308
+ cloud_id, request
1309
+ ).result
1310
+
1311
+ # Skip the upload entirely if the file already exists.
1312
+ if info.file_exists and not overwrite_existing_file:
1313
+ internal_logger.debug(
1314
+ f"Skipping file upload for '{file_name}' because it already exists in cloud storage."
1315
+ )
1316
+ return info
1317
+
1318
+ if info.url_scheme == CloudDataBucketPresignedUrlScheme.SMART_OPEN:
1319
+ # If the presigned URL scheme is SMART_OPEN, upload to cloud storage using the provided bucket name, path, & environment, and the smart_open library.
1320
+ bucket_name = info.bucket_name
1321
+ bucket_path = info.bucket_path
1322
+
1323
+ env_vars: Dict[str, str] = {
1324
+ "AWS_ENDPOINT_URL": info.url,
1325
+ }
1326
+ with set_env(**env_vars), smart_open.open(
1327
+ f"{bucket_name}/{bucket_path}", "wb",
1328
+ ) as fout:
1329
+ fout.write(zip_file_bytes)
1330
+
1331
+ else:
1332
+ # Default to HTTP PUT.
1333
+ internal_logger.debug(f"Uploading file '{file_name}' to cloud storage.")
1334
+ headers = None
1335
+ if info.file_uri.startswith("azure") or info.file_uri.startswith("abfss"):
1336
+ headers = {
1337
+ "x-ms-blob-type": "BlockBlob",
1338
+ "x-ms-version": "2025-07-05",
1339
+ "x-ms-date": datetime.utcnow().strftime(
1340
+ "%a, %d %b %Y %H:%M:%S GMT"
1341
+ ),
1342
+ "x-ms-blob-content-type": "application/zip",
1343
+ }
1344
+ requests.put(
1345
+ info.url, data=zip_file_bytes, headers=headers
1346
+ ).raise_for_status()
1347
+
1348
+ return info
1349
+
1246
1350
  @handle_api_exceptions
1247
1351
  def upload_local_dir_to_cloud_storage(
1248
1352
  self,
@@ -1251,63 +1355,91 @@ class AnyscaleClient(AnyscaleClientInterface):
1251
1355
  cloud_id: str,
1252
1356
  excludes: Optional[List[str]] = None,
1253
1357
  overwrite_existing_file: bool = OVERWRITE_EXISTING_CLOUD_STORAGE_FILES,
1358
+ cloud_deployment: Optional[str] = None,
1254
1359
  ) -> str:
1255
1360
  if not pathlib.Path(local_dir).is_dir():
1256
1361
  raise RuntimeError(f"Path '{local_dir}' is not a valid directory.")
1257
1362
 
1363
+ cloud_deployment_id = None
1364
+ if cloud_deployment is not None:
1365
+ cloud_deployments = self._internal_api_client.get_cloud_deployments_api_v2_clouds_cloud_id_deployments_get(
1366
+ cloud_id=cloud_id,
1367
+ ).results
1368
+ cloud_deployment_id = next(
1369
+ (
1370
+ deployment.cloud_deployment_id
1371
+ for deployment in cloud_deployments
1372
+ if deployment.name == cloud_deployment
1373
+ ),
1374
+ None,
1375
+ )
1376
+ if cloud_deployment_id is None:
1377
+ raise ValueError(
1378
+ f"Cloud deployment '{cloud_deployment}' not found in cloud '{cloud_id}'"
1379
+ )
1380
+
1258
1381
  with zip_local_dir(local_dir, excludes=excludes) as (
1259
1382
  _,
1260
1383
  zip_file_bytes,
1261
1384
  content_hash,
1262
1385
  ):
1263
- file_name = RUNTIME_ENV_PACKAGE_FORMAT.format(content_hash=content_hash)
1264
- request = CloudDataBucketPresignedUrlRequest(
1265
- file_type=CloudDataBucketFileType.RUNTIME_ENV_PACKAGES,
1266
- file_name=file_name,
1267
- access_mode=CloudDataBucketAccessMode.WRITE,
1386
+ info = self._upload_local_runtime_env(
1387
+ cloud_id=cloud_id,
1388
+ cloud_deployment_id=cloud_deployment_id,
1389
+ zip_file_bytes=zip_file_bytes,
1390
+ content_hash=content_hash,
1391
+ overwrite_existing_file=overwrite_existing_file,
1268
1392
  )
1269
- info: CloudDataBucketPresignedUrlResponse = self._internal_api_client.generate_cloud_data_bucket_presigned_url_api_v2_clouds_cloud_id_generate_cloud_data_bucket_presigned_url_post(
1270
- cloud_id, request
1271
- ).result
1393
+ return info.file_uri
1272
1394
 
1273
- # Skip the upload entirely if the file already exists.
1274
- if info.file_exists and not overwrite_existing_file:
1275
- internal_logger.debug(
1276
- f"Skipping file upload for '{file_name}' because it already exists in cloud storage."
1277
- )
1278
- return info.file_uri
1395
+ def upload_local_dir_to_cloud_storage_multi_deployment(
1396
+ self,
1397
+ local_dir: str,
1398
+ *,
1399
+ cloud_id: str,
1400
+ cloud_deployments: List[Optional[str]],
1401
+ excludes: Optional[List[str]] = None,
1402
+ overwrite_existing_file: bool = False,
1403
+ ) -> str:
1404
+ if not pathlib.Path(local_dir).is_dir():
1405
+ raise RuntimeError(f"Path '{local_dir}' is not a valid directory.")
1279
1406
 
1280
- if info.url_scheme == CloudDataBucketPresignedUrlScheme.SMART_OPEN:
1281
- # If the presigned URL scheme is SMART_OPEN, upload to cloud storage using the provided bucket name, path, & environment, and the smart_open library.
1282
- bucket_name = info.bucket_name
1283
- bucket_path = info.bucket_path
1407
+ all_cloud_deployments = self._internal_api_client.get_cloud_deployments_api_v2_clouds_cloud_id_deployments_get(
1408
+ cloud_id=cloud_id,
1409
+ ).results
1410
+ cloud_deployment_ids = {
1411
+ deployment.name: deployment.cloud_deployment_id
1412
+ for deployment in all_cloud_deployments
1413
+ }
1284
1414
 
1285
- env_vars: Dict[str, str] = {
1286
- "AWS_ENDPOINT_URL": info.url,
1287
- }
1288
- with set_env(**env_vars), smart_open.open(
1289
- f"{bucket_name}/{bucket_path}", "wb",
1290
- ) as fout:
1291
- fout.write(zip_file_bytes)
1415
+ bucket_paths = set()
1292
1416
 
1293
- else:
1294
- # Default to HTTP PUT.
1295
- internal_logger.debug(f"Uploading file '{file_name}' to cloud storage.")
1296
- headers = None
1297
- if info.file_uri.startswith("azure"):
1298
- headers = {
1299
- "x-ms-blob-type": "BlockBlob",
1300
- "x-ms-version": "2025-07-05",
1301
- "x-ms-date": datetime.utcnow().strftime(
1302
- "%a, %d %b %Y %H:%M:%S GMT"
1303
- ),
1304
- "x-ms-blob-content-type": "application/zip",
1305
- }
1306
- requests.put(
1307
- info.url, data=zip_file_bytes, headers=headers
1308
- ).raise_for_status()
1417
+ with zip_local_dir(local_dir, excludes=excludes) as (
1418
+ _,
1419
+ zip_file_bytes,
1420
+ content_hash,
1421
+ ):
1422
+ for cloud_deployment in cloud_deployments:
1423
+ if cloud_deployment is not None:
1424
+ if cloud_deployment not in cloud_deployment_ids:
1425
+ raise ValueError(
1426
+ f"Cloud deployment '{cloud_deployment}' not found in cloud '{cloud_id}'"
1427
+ )
1428
+ cloud_deployment_id = cloud_deployment_ids[cloud_deployment]
1429
+ else:
1430
+ cloud_deployment_id = None
1431
+
1432
+ info = self._upload_local_runtime_env(
1433
+ cloud_id=cloud_id,
1434
+ cloud_deployment_id=cloud_deployment_id,
1435
+ zip_file_bytes=zip_file_bytes,
1436
+ content_hash=content_hash,
1437
+ overwrite_existing_file=overwrite_existing_file,
1438
+ )
1439
+ bucket_paths.add(info.bucket_path)
1309
1440
 
1310
- return info.file_uri
1441
+ assert len(bucket_paths) == 1
1442
+ return bucket_paths.pop()
1311
1443
 
1312
1444
  def _fetch_log_chunks(self, job_run_id: str) -> Tuple[List[str], Any]:
1313
1445
  all_log_chunk_urls = []
@@ -25,10 +25,13 @@ from anyscale.client.openapi_client.models import (
25
25
  OrganizationCollaborator,
26
26
  OrganizationInvitation,
27
27
  Project,
28
+ ProjectBase,
29
+ ProjectListResponse,
28
30
  ResourceQuota,
29
31
  ServerSessionToken,
30
32
  SessionState,
31
33
  WorkspaceDataplaneProxiedArtifacts,
34
+ WriteProject,
32
35
  )
33
36
  from anyscale.client.openapi_client.models.create_schedule import CreateSchedule
34
37
  from anyscale.client.openapi_client.models.decorated_job_queue import DecoratedJobQueue
@@ -54,7 +57,7 @@ from anyscale.utils.workspace_notification import WorkspaceNotification
54
57
  # Maybe just make it part of the release process to update it, or fetch the
55
58
  # default builds and get the latest one. The best thing to do is probably
56
59
  # to populate this in the backend.
57
- DEFAULT_RAY_VERSION = "2.48.0" # RAY_RELEASE_UPDATE: update to latest version.
60
+ DEFAULT_RAY_VERSION = "2.49.0" # RAY_RELEASE_UPDATE: update to latest version.
58
61
  DEFAULT_PYTHON_VERSION = "py311"
59
62
  RUNTIME_ENV_PACKAGE_FORMAT = "pkg_{content_hash}.zip"
60
63
 
@@ -355,13 +358,44 @@ class AnyscaleClientInterface(ABC):
355
358
  raise NotImplementedError
356
359
 
357
360
  @abstractmethod
358
- def get_project(self, project_id: str) -> Optional[Project]:
361
+ def get_project(self, project_id: str) -> Project:
359
362
  """Get a project by id.
360
363
 
361
364
  Returns None if not found.
362
365
  """
363
366
  raise NotImplementedError
364
367
 
368
+ @abstractmethod
369
+ def list_projects(
370
+ self,
371
+ *,
372
+ name_contains: Optional[str] = None,
373
+ creator_id: Optional[str] = None,
374
+ parent_cloud_id: Optional[str] = None,
375
+ include_defaults: bool = True,
376
+ sort_field: Optional[str] = None,
377
+ sort_order: Optional[str] = None,
378
+ paging_token: Optional[str] = None,
379
+ count: Optional[int] = None,
380
+ ) -> ProjectListResponse:
381
+ """List projects."""
382
+ raise NotImplementedError
383
+
384
+ @abstractmethod
385
+ def create_project(self, project: WriteProject) -> ProjectBase:
386
+ """Create a project."""
387
+ raise NotImplementedError
388
+
389
+ @abstractmethod
390
+ def delete_project(self, project_id: str) -> None:
391
+ """Delete a project."""
392
+ raise NotImplementedError
393
+
394
+ @abstractmethod
395
+ def get_default_project(self, parent_cloud_id: str) -> Project:
396
+ """Get the default project for the provided cloud ID."""
397
+ raise NotImplementedError
398
+
365
399
  @abstractmethod
366
400
  def add_project_collaborators(
367
401
  self, project_id: str, collaborators: List[CreateUserProjectCollaborator]
@@ -486,6 +520,7 @@ class AnyscaleClientInterface(ABC):
486
520
  cloud_id: str,
487
521
  excludes: Optional[List[str]] = None,
488
522
  overwrite_existing_file: bool = False,
523
+ cloud_deployment: Optional[str] = None,
489
524
  ) -> str:
490
525
  """Upload the provided directory to cloud storage and return a URI for it.
491
526
 
@@ -499,6 +534,30 @@ class AnyscaleClientInterface(ABC):
499
534
  """
500
535
  raise NotImplementedError
501
536
 
537
+ @abstractmethod
538
+ def upload_local_dir_to_cloud_storage_multi_deployment(
539
+ self,
540
+ local_dir: str,
541
+ *,
542
+ cloud_id: str,
543
+ cloud_deployments: List[Optional[str]],
544
+ excludes: Optional[List[str]] = None,
545
+ overwrite_existing_file: bool = False,
546
+ ) -> str:
547
+ """Upload the provided directory to the object storage for each of the provided
548
+ cloud deployments and return the bucket path of the uploaded file.
549
+
550
+ The directory will be zipped and the resulting bucket path will later be converted
551
+ to a URI that can be used in a Ray runtime_env.
552
+
553
+ The upload is preformed using a pre-signed URL fetched from Anyscale, so no
554
+ local cloud provider authentication is required.
555
+
556
+ The path is content-addressable (containing a hash of the directory contents), so by
557
+ default if the target file path already exists it will not be overwritten.
558
+ """
559
+ raise NotImplementedError
560
+
502
561
  @abstractmethod
503
562
  def logs_for_job_run(
504
563
  self,
@@ -47,9 +47,12 @@ from anyscale.client.openapi_client.models import (
47
47
  ProductionJob,
48
48
  ProductionJobStateTransition,
49
49
  Project,
50
+ ProjectBase,
51
+ ProjectListResponse,
50
52
  ResourceQuota,
51
53
  ServerSessionToken,
52
54
  WorkspaceDataplaneProxiedArtifacts,
55
+ WriteProject,
53
56
  )
54
57
  from anyscale.client.openapi_client.models.create_schedule import CreateSchedule
55
58
  from anyscale.client.openapi_client.models.decorated_job_queue import DecoratedJobQueue
@@ -108,7 +111,7 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
108
111
 
109
112
  SCHEDULE_NEXT_TRIGGER_AT_TIME = datetime.utcnow()
110
113
 
111
- def __init__(self):
114
+ def __init__(self) -> None:
112
115
  self._builds: Dict[str, ClusterEnvironmentBuild] = {
113
116
  self.DEFAULT_CLUSTER_ENV_BUILD_ID: ClusterEnvironmentBuild(
114
117
  id=self.DEFAULT_CLUSTER_ENV_BUILD_ID,
@@ -155,7 +158,8 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
155
158
  self._jobs: Dict[str, ProductionJob] = {}
156
159
  self._job_runs: Dict[str, List[APIJobRun]] = defaultdict(list)
157
160
  self._job_queues: Dict[str, DecoratedJobQueue] = {}
158
- self._project_to_id: Dict[Optional[str] : Dict[Optional[str], str]] = {}
161
+ self._project_to_id: Dict[Optional[str] : Dict[Optional[str], str]] = {} # type: ignore
162
+ self._projects: Dict[str, Project] = {}
159
163
  self._project_collaborators: Dict[str, List[CreateUserProjectCollaborator]] = {}
160
164
  self._rolled_out_model: Optional[ApplyProductionServiceV2Model] = None
161
165
  self._sent_workspace_notifications: List[WorkspaceNotification] = []
@@ -164,6 +168,9 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
164
168
  self._archived_jobs: Dict[str, ProductionJob] = {}
165
169
  self._requirements_path: Optional[str] = None
166
170
  self._upload_uri_mapping: Dict[str, str] = {}
171
+ self._upload_bucket_path_mapping: Dict[
172
+ str, Tuple[List[Optional[str]], str]
173
+ ] = {}
167
174
  self._submitted_job: Optional[CreateInternalProductionJob] = None
168
175
  self._env_vars: Optional[Dict[str, str]] = None
169
176
  self._job_run_logs: Dict[str, str] = {}
@@ -680,18 +687,128 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
680
687
 
681
688
  return None
682
689
 
690
+ def build_project_with_args(self, **kwargs) -> Project:
691
+ # set values for required fields if not provided
692
+ if "id" not in kwargs:
693
+ kwargs["id"] = f"project-id-{uuid.uuid4()!s}"
694
+ if "name" not in kwargs:
695
+ kwargs["name"] = f"project-{kwargs['id']}"
696
+ if "description" not in kwargs:
697
+ kwargs["description"] = f"project-description-{kwargs['id']}"
698
+ if "parent_cloud_id" not in kwargs:
699
+ kwargs["parent_cloud_id"] = self.DEFAULT_CLOUD_ID
700
+ if "created_at" not in kwargs:
701
+ kwargs["created_at"] = datetime.utcnow()
702
+ if "is_owner" not in kwargs:
703
+ kwargs["is_owner"] = True
704
+ if "is_read_only" not in kwargs:
705
+ kwargs["is_read_only"] = False
706
+ if "directory_name" not in kwargs:
707
+ kwargs["directory_name"] = "default"
708
+ if "is_default" not in kwargs:
709
+ kwargs["is_default"] = False
710
+ return Project(**kwargs, local_vars_configuration=OPENAPI_NO_VALIDATION)
711
+
712
+ def create_project_with_args(self, **kwargs) -> str:
713
+ project = self.build_project_with_args(**kwargs)
714
+ project_id: str = project.id # type: ignore
715
+ self._projects[project_id] = project
716
+ return project_id
717
+
718
+ def get_project_by_id_or_name(
719
+ self, *, project_id: Optional[str] = None, project_name: Optional[str] = None,
720
+ ) -> Optional[Project]:
721
+ if project_id:
722
+ return self._projects.get(project_id, None)
723
+ if project_name:
724
+ for project in self._projects.values():
725
+ if project.name == project_name:
726
+ return project
727
+ return None
728
+
683
729
  def get_project(self, project_id: str) -> Optional[Project]:
684
730
  for cloud_project_dict in self._project_to_id.values():
685
731
  for p_name, p_id in cloud_project_dict.items():
686
732
  if p_id == project_id:
687
733
  # return stub project
688
- return Project(
689
- name=p_name,
690
- id=p_id,
691
- local_vars_configuration=OPENAPI_NO_VALIDATION,
692
- )
734
+ return self.build_project_with_args(id=p_id, name=p_name,)
693
735
  return None
694
736
 
737
+ def list_projects(
738
+ self,
739
+ *,
740
+ name_contains: Optional[str] = None,
741
+ creator_id: Optional[str] = None,
742
+ parent_cloud_id: Optional[str] = None,
743
+ include_defaults: bool = True,
744
+ sort_field: Optional[str] = None, # noqa: ARG002
745
+ sort_order: Optional[str] = None, # noqa: ARG002
746
+ paging_token: Optional[str] = None, # noqa: ARG002
747
+ count: Optional[int] = None,
748
+ ) -> ProjectListResponse:
749
+ projects = list(self._projects.values())
750
+ if name_contains:
751
+ projects = [p for p in projects if p.name and name_contains in p.name]
752
+ if creator_id:
753
+ projects = [p for p in projects if p.creator_id == creator_id]
754
+ if parent_cloud_id:
755
+ projects = [p for p in projects if p.parent_cloud_id == parent_cloud_id]
756
+ if not include_defaults:
757
+ projects = [p for p in projects if not p.is_default]
758
+ if sort_field and sort_order and sort_field == "NAME":
759
+ projects.sort(
760
+ key=lambda x: x.name if x.name else "", reverse=sort_order == "DESC"
761
+ )
762
+ if count:
763
+ projects = projects[:count]
764
+ return ProjectListResponse(
765
+ results=projects,
766
+ metadata=ListResponseMetadata(
767
+ next_paging_token=paging_token,
768
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
769
+ ),
770
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
771
+ )
772
+
773
+ def create_project(self, project: WriteProject) -> ProjectBase:
774
+ project_id = f"project-id-{uuid.uuid4()!s}"
775
+ self._projects[project_id] = Project(
776
+ id=project_id,
777
+ name=project.name,
778
+ description=project.description,
779
+ cloud_id=project.cloud_id,
780
+ initial_cluster_config=project.initial_cluster_config,
781
+ parent_cloud_id=project.parent_cloud_id,
782
+ created_at=datetime.utcnow(),
783
+ creator_id=self.DEFAULT_USER_ID,
784
+ is_default=False,
785
+ is_owner=True,
786
+ is_read_only=False,
787
+ directory_name="default",
788
+ owners=[
789
+ MiniUser(
790
+ id=self.DEFAULT_USER_ID,
791
+ email=self.DEFAULT_USER_EMAIL,
792
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
793
+ )
794
+ ],
795
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
796
+ )
797
+ return ProjectBase(
798
+ id=project_id, local_vars_configuration=OPENAPI_NO_VALIDATION,
799
+ )
800
+
801
+ def delete_project(self, project_id: str) -> None:
802
+ if project_id not in self._projects:
803
+ raise ValueError(f"Project {project_id} not found")
804
+ self._projects.pop(project_id)
805
+
806
+ def get_default_project(self, parent_cloud_id: str) -> Project:
807
+ for project in self._projects.values():
808
+ if project.parent_cloud_id == parent_cloud_id and project.is_default:
809
+ return project
810
+ raise ValueError(f"No default project found for cloud {parent_cloud_id}")
811
+
695
812
  def add_project_collaborators(
696
813
  self, project_id: str, collaborators: List[CreateUserProjectCollaborator]
697
814
  ):
@@ -897,14 +1014,17 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
897
1014
 
898
1015
  def upload_local_dir_to_cloud_storage(
899
1016
  self,
900
- local_dir: str, # noqa: ARG002
1017
+ local_dir: str,
901
1018
  *,
902
1019
  cloud_id: str,
903
1020
  excludes: Optional[List[str]] = None, # noqa: ARG002
904
1021
  overwrite_existing_file: bool = False, # noqa: ARG002
1022
+ cloud_deployment: Optional[str] = None,
905
1023
  ) -> str:
906
1024
  # Ensure that URIs are consistent for the same passed directory.
907
1025
  bucket = self.CLOUD_BUCKET.format(cloud_id=cloud_id)
1026
+ if cloud_deployment is not None:
1027
+ bucket += f"_{cloud_deployment}"
908
1028
  if local_dir not in self._upload_uri_mapping:
909
1029
  self._upload_uri_mapping[
910
1030
  local_dir
@@ -912,6 +1032,23 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
912
1032
 
913
1033
  return self._upload_uri_mapping[local_dir]
914
1034
 
1035
+ def upload_local_dir_to_cloud_storage_multi_deployment(
1036
+ self,
1037
+ local_dir: str,
1038
+ *,
1039
+ cloud_id: str,
1040
+ cloud_deployments: List[Optional[str]],
1041
+ excludes: Optional[List[str]] = None, # noqa: ARG002
1042
+ overwrite_existing_file: bool = False, # noqa: ARG002
1043
+ ) -> str:
1044
+ bucket = self.CLOUD_BUCKET.format(cloud_id=cloud_id)
1045
+ if local_dir not in self._upload_bucket_path_mapping:
1046
+ self._upload_bucket_path_mapping[local_dir] = (
1047
+ cloud_deployments,
1048
+ f"{bucket}/fake_pkg_{str(uuid.uuid4())}.zip",
1049
+ )
1050
+ return self._upload_bucket_path_mapping[local_dir][1]
1051
+
915
1052
  def add_job_run_logs(self, job_run_id: str, logs: str):
916
1053
  self._job_run_logs[job_run_id] = logs
917
1054