anyscale 0.26.47__py3-none-any.whl → 0.26.49__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. anyscale/__init__.py +0 -7
  2. anyscale/_private/anyscale_client/README.md +115 -0
  3. anyscale/_private/anyscale_client/anyscale_client.py +12 -213
  4. anyscale/_private/anyscale_client/common.py +0 -55
  5. anyscale/_private/anyscale_client/fake_anyscale_client.py +19 -46
  6. anyscale/_private/docgen/__main__.py +32 -47
  7. anyscale/_private/docgen/generator.py +32 -16
  8. anyscale/_private/docgen/generator_legacy.py +58 -6
  9. anyscale/_private/docgen/models.md +3 -2
  10. anyscale/_private/workload/workload_config.py +16 -8
  11. anyscale/_private/workload/workload_sdk.py +24 -7
  12. anyscale/client/README.md +10 -2
  13. anyscale/client/openapi_client/__init__.py +6 -2
  14. anyscale/client/openapi_client/api/default_api.py +558 -8
  15. anyscale/client/openapi_client/models/__init__.py +6 -2
  16. anyscale/client/openapi_client/models/{alert_type.py → alert_issue_type.py} +8 -20
  17. anyscale/client/openapi_client/models/baseimagesenum.py +1 -2
  18. anyscale/client/openapi_client/models/cloud.py +31 -3
  19. anyscale/client/openapi_client/models/cloud_deployment.py +30 -3
  20. anyscale/client/openapi_client/models/cloud_with_cloud_resource.py +29 -1
  21. anyscale/client/openapi_client/models/cloud_with_cloud_resource_gcp.py +29 -1
  22. anyscale/client/openapi_client/models/dataset_metrics.py +6 -6
  23. anyscale/client/openapi_client/models/dataset_state.py +2 -1
  24. anyscale/client/openapi_client/models/decorated_cloud_deployment.py +481 -0
  25. anyscale/client/openapi_client/models/decoratedclouddeployment_response.py +121 -0
  26. anyscale/client/openapi_client/models/describe_system_workload_response.py +32 -6
  27. anyscale/client/openapi_client/models/experimental_workspace.py +29 -1
  28. anyscale/client/openapi_client/models/experimental_workspaces_sort_field.py +2 -1
  29. anyscale/client/openapi_client/models/metrics_query_response.py +121 -0
  30. anyscale/client/openapi_client/models/{clouddeployment_response.py → metricsqueryresponse_response.py} +11 -11
  31. anyscale/client/openapi_client/models/operator_metrics.py +8 -9
  32. anyscale/client/openapi_client/models/operator_status.py +102 -0
  33. anyscale/client/openapi_client/models/organization_usage_alert.py +20 -20
  34. anyscale/client/openapi_client/models/supportedbaseimagesenum.py +1 -2
  35. anyscale/cloud/models.py +330 -0
  36. anyscale/commands/cloud_commands.py +136 -44
  37. anyscale/commands/command_examples.py +54 -134
  38. anyscale/commands/compute_config_commands.py +7 -11
  39. anyscale/compute_config/__init__.py +2 -16
  40. anyscale/compute_config/_private/compute_config_sdk.py +27 -17
  41. anyscale/compute_config/commands.py +14 -44
  42. anyscale/compute_config/models.py +49 -26
  43. anyscale/controllers/cloud_controller.py +289 -171
  44. anyscale/controllers/cloud_file_storage_utils.py +204 -0
  45. anyscale/controllers/kubernetes_verifier.py +1570 -0
  46. anyscale/job/_private/job_sdk.py +17 -8
  47. anyscale/job/models.py +1 -1
  48. anyscale/scripts.py +0 -2
  49. anyscale/sdk/anyscale_client/models/baseimagesenum.py +1 -2
  50. anyscale/sdk/anyscale_client/models/cloud.py +31 -3
  51. anyscale/sdk/anyscale_client/models/supportedbaseimagesenum.py +1 -2
  52. anyscale/shared_anyscale_utils/headers.py +3 -0
  53. anyscale/shared_anyscale_utils/utils/id_gen.py +1 -0
  54. anyscale/version.py +1 -1
  55. anyscale/workspace/models.py +14 -7
  56. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/METADATA +1 -1
  57. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/RECORD +62 -73
  58. anyscale/commands/llm/dataset_commands.py +0 -269
  59. anyscale/commands/llm/group.py +0 -15
  60. anyscale/commands/llm/models_commands.py +0 -123
  61. anyscale/controllers/llm/__init__.py +0 -0
  62. anyscale/controllers/llm/models_controller.py +0 -144
  63. anyscale/llm/__init__.py +0 -2
  64. anyscale/llm/dataset/__init__.py +0 -2
  65. anyscale/llm/dataset/_private/__init__.py +0 -0
  66. anyscale/llm/dataset/_private/docs.py +0 -63
  67. anyscale/llm/dataset/_private/models.py +0 -71
  68. anyscale/llm/dataset/_private/sdk.py +0 -147
  69. anyscale/llm/model/__init__.py +0 -2
  70. anyscale/llm/model/_private/models_sdk.py +0 -62
  71. anyscale/llm/model/commands.py +0 -93
  72. anyscale/llm/model/models.py +0 -171
  73. anyscale/llm/model/sdk.py +0 -62
  74. anyscale/llm/sdk.py +0 -27
  75. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/WHEEL +0 -0
  76. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/entry_points.txt +0 -0
  77. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/licenses/LICENSE +0 -0
  78. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/licenses/NOTICE +0 -0
  79. {anyscale-0.26.47.dist-info → anyscale-0.26.49.dist-info}/top_level.txt +0 -0
anyscale/__init__.py CHANGED
@@ -26,7 +26,6 @@ from anyscale import (
26
26
  integrations,
27
27
  job,
28
28
  job_queue,
29
- llm,
30
29
  organization_invitation,
31
30
  project,
32
31
  resource_quota,
@@ -48,7 +47,6 @@ from anyscale.connect import ClientBuilder
48
47
  from anyscale.image import ImageSDK
49
48
  from anyscale.job import JobSDK
50
49
  from anyscale.job_queue import JobQueueSDK
51
- from anyscale.llm import LLMSDK
52
50
  from anyscale.organization_invitation import OrganizationInvitationSDK
53
51
  from anyscale.project import ProjectSDK
54
52
  from anyscale.resource_quota import ResourceQuotaSDK
@@ -127,7 +125,6 @@ class Anyscale:
127
125
  self._cloud_sdk = CloudSDK(client=self._anyscale_client)
128
126
  self._schedule_sdk = ScheduleSDK(client=self._anyscale_client)
129
127
  self._image_sdk = ImageSDK(client=self._anyscale_client)
130
- self._llm_sdk = LLMSDK(client=self._anyscale_client)
131
128
  self._organization_invitation_sdk = OrganizationInvitationSDK(
132
129
  client=self._anyscale_client
133
130
  )
@@ -217,10 +214,6 @@ class Anyscale:
217
214
  def image(self) -> ImageSDK: # noqa: F811
218
215
  return self._image_sdk
219
216
 
220
- @property
221
- def llm(self) -> LLMSDK: # noqa: F811
222
- return self._llm_sdk
223
-
224
217
  @property
225
218
  def organization_invitation(self) -> OrganizationInvitationSDK: # noqa: F811
226
219
  return self._organization_invitation_sdk
@@ -7,6 +7,17 @@ The purpose of centralizing this logic is to:
7
7
  - Keep all external dependencies in one place.
8
8
  - Enable writing comprehensive unit tests for upstream components (without using mocks!) using the `FakeAnyscaleClient`.
9
9
 
10
+ ## Installation
11
+
12
+ To install the anyscale client package in development mode:
13
+
14
+ ```bash
15
+ cd frontend/cli
16
+ pip install -e .
17
+ ```
18
+
19
+ This will install the `anyscale` package in editable mode, allowing you to make changes to the code and have them immediately available.
20
+
10
21
  ## Testing
11
22
 
12
23
  The `AnyscaleClient` is tested using a fake version of the internal and external OpenAPI clients.
@@ -14,3 +25,107 @@ The `AnyscaleClient` is tested using a fake version of the internal and external
14
25
  Upstream components should use the `FakeAnyscaleClient` to write their tests.
15
26
  This client should mirror the behavior of the real `AnyscaleClient` as closely as possible (avoid making methods and functionality
16
27
  complete "dummies").
28
+
29
+ ### Running Tests
30
+
31
+ 1. **Unit Tests**: Run the test suite to verify client functionality
32
+ ```bash
33
+ cd frontend/cli
34
+ python -m pytest tests/unit/test_anyscale_client.py
35
+ ```
36
+
37
+ 2. **Integration Tests**: Test the client against the actual Anyscale API
38
+ ```bash
39
+ # Set up your Anyscale credentials
40
+ export ANYSCALE_TOKEN="your_token_here"
41
+
42
+ # Run integration tests
43
+ python -m pytest tests/test_integrations.py
44
+ ```
45
+
46
+ 3. **All Client-Related Tests**: Run all tests that use the anyscale client
47
+ ```bash
48
+ cd frontend/cli
49
+ python -m pytest tests/ -k "anyscale_client" -v
50
+ ```
51
+
52
+ ### Testing Job Submission
53
+
54
+ To test job submission functionality:
55
+
56
+ 1. **Create a test job configuration file** (`job.yaml`):
57
+ ```yaml
58
+ name: test-job
59
+ compute_config: default:1
60
+ working_dir: /path/to/working/directory
61
+ requirements:
62
+ - numpy==1.24.0
63
+ - pandas==2.0.0
64
+ entrypoint: python your_script.py
65
+ max_retries: 0
66
+ ```
67
+
68
+ 2. **Submit a test job**:
69
+ ```bash
70
+ anyscale job submit -f job.yaml
71
+ ```
72
+
73
+ 3. **Monitor job status**:
74
+ ```bash
75
+ anyscale job status <job_id>
76
+ ```
77
+
78
+ 4. **View job logs**:
79
+ ```bash
80
+ anyscale job logs <job_id>
81
+ ```
82
+
83
+ ### Example Test Job
84
+
85
+ Here's an example job configuration for testing:
86
+
87
+ ```yaml
88
+ name: generate-doggos-embeddings
89
+ compute_config: doggos-azure:1
90
+ working_dir: azure://cloud-dev-blob/org_7c1Kalm9WcX2bNIjW53GUT/cld_wgmfc248s6t7513awyubirlwu9/runtime_env_packages/pkg_b60e2d10615fb9845a9bad7d9307547a.zip
91
+ requirements:
92
+ - matplotlib==3.10.0
93
+ - torch==2.7.1
94
+ - transformers==4.52.3
95
+ - scikit-learn==1.6.0
96
+ - mlflow==2.19.0
97
+ - ipywidgets==8.1.3
98
+ entrypoint: python doggos/embed.py
99
+ max_retries: 0
100
+ ```
101
+
102
+ ### Testing with Fake Client
103
+
104
+ For unit testing components that depend on the Anyscale client:
105
+
106
+ ```python
107
+ from anyscale._private.anyscale_client import FakeAnyscaleClient
108
+
109
+ # Create a fake client for testing
110
+ fake_client = FakeAnyscaleClient()
111
+
112
+ # Use the fake client in your tests
113
+ # The fake client should behave like the real client
114
+ result = fake_client.submit_job(job_config)
115
+ assert result.job_id is not None
116
+ ```
117
+
118
+ ### Debugging
119
+
120
+ - Use `--verbose` flag for detailed output: `anyscale job submit -f job.yaml --verbose`
121
+ - Check job status in the UI: The CLI will provide a URL to view the job in the Anyscale console
122
+ - Use `--wait` flag to wait for job completion and stream logs: `anyscale job submit -f job.yaml --wait`
123
+
124
+ ### Common Issues
125
+
126
+ 1. **Authentication**: Ensure your Anyscale token is properly set
127
+ 2. **Network**: Check your internet connection and firewall settings
128
+ 3. **Dependencies**: Verify all required packages are installed
129
+ 4. **Job Configuration**: Ensure your YAML file is properly formatted
130
+
131
+ For more detailed testing scenarios, refer to the test files in the `tests/` directory.
@@ -1,5 +1,6 @@
1
1
  from abc import ABC, abstractmethod
2
2
  import contextlib
3
+ from datetime import datetime
3
4
  from functools import wraps
4
5
  import io
5
6
  import json
@@ -23,12 +24,7 @@ from anyscale._private.anyscale_client.common import (
23
24
  RUNTIME_ENV_PACKAGE_FORMAT,
24
25
  )
25
26
  from anyscale._private.models.image_uri import ImageURI
26
- from anyscale._private.models.model_base import InternalListResponse, ListResponse
27
- from anyscale._private.utils.progress_util import (
28
- FileDownloadProgress,
29
- ProgressFileReader,
30
- )
31
- from anyscale.api_utils.common_utils import source_cloud_id_and_project_id
27
+ from anyscale._private.utils.progress_util import FileDownloadProgress
32
28
  from anyscale.api_utils.logs_util import _download_log_from_s3_url_sync
33
29
  from anyscale.authenticate import AuthenticationBlock, get_auth_api_client
34
30
  from anyscale.cli_logger import BlockLogger
@@ -53,23 +49,17 @@ from anyscale.client.openapi_client.models import (
53
49
  ComputeTemplateQuery,
54
50
  CreateCloudCollaborator,
55
51
  CreateComputeTemplate,
56
- CreateDataset,
57
52
  CreateExperimentalWorkspace,
58
53
  CreateInternalProductionJob,
59
54
  CreateOrganizationInvitation,
60
55
  CreateResourceQuota,
61
56
  CreateUserProjectCollaborator,
62
- Dataset as InternalDataset,
63
- DatasetUpload,
64
57
  DecoratedComputeTemplate,
65
58
  DecoratedjobqueueListResponse,
66
59
  DecoratedlistserviceapimodelListResponse,
67
60
  DecoratedProductionServiceV2APIModel,
68
61
  DecoratedSession,
69
- DeletedPlatformFineTunedModel,
70
62
  ExperimentalWorkspace,
71
- FineTunedModel,
72
- FinetunedmodelListResponse,
73
63
  GetOrCreateBuildFromImageUriRequest,
74
64
  InternalProductionJob,
75
65
  JobQueueSortDirective,
@@ -1303,11 +1293,16 @@ class AnyscaleClient(AnyscaleClientInterface):
1303
1293
  else:
1304
1294
  # Default to HTTP PUT.
1305
1295
  internal_logger.debug(f"Uploading file '{file_name}' to cloud storage.")
1306
- headers = (
1307
- {"x-ms-blob-type": "BlockBlob"}
1308
- if info.file_uri.startswith("azure")
1309
- else None
1310
- )
1296
+ headers = None
1297
+ if info.file_uri.startswith("azure"):
1298
+ headers = {
1299
+ "x-ms-blob-type": "BlockBlob",
1300
+ "x-ms-version": "2025-07-05",
1301
+ "x-ms-date": datetime.utcnow().strftime(
1302
+ "%a, %d %b %Y %H:%M:%S GMT"
1303
+ ),
1304
+ "x-ms-blob-content-type": "application/zip",
1305
+ }
1311
1306
  requests.put(
1312
1307
  info.url, data=zip_file_bytes, headers=headers
1313
1308
  ).raise_for_status()
@@ -1564,160 +1559,6 @@ class AnyscaleClient(AnyscaleClientInterface):
1564
1559
  id
1565
1560
  )
1566
1561
 
1567
- @handle_api_exceptions
1568
- def get_dataset(self, name: str, version: Optional[int], project: Optional[str]):
1569
- project_id = self._source_project_id(project)
1570
- internal_dataset = self._internal_api_client.find_dataset_api_v2_datasets_find_get(
1571
- name=name, version=version, project_id=project_id
1572
- ).result
1573
- from anyscale.llm.dataset._private.models import Dataset
1574
-
1575
- dataset = Dataset.parse_from_internal_model(internal_dataset)
1576
- return dataset
1577
-
1578
- def upload_dataset(
1579
- self,
1580
- dataset_file: str,
1581
- name: Optional[str],
1582
- description: Optional[str],
1583
- cloud: Optional[str],
1584
- project: Optional[str],
1585
- ):
1586
- # Resolve `~/.../file` to `/home/user/.../file`
1587
- dataset_file = os.path.expanduser(dataset_file)
1588
-
1589
- if not os.path.isfile(dataset_file):
1590
- raise ValueError(f"Path '{dataset_file}' is not a valid file.")
1591
- dataset_file_size = os.path.getsize(dataset_file)
1592
- if dataset_file_size > 5 * Bytes.GB:
1593
- raise ValueError(
1594
- f"File '{dataset_file}' is too large to upload. The maximum size is 5 GB."
1595
- )
1596
- project_id = self._get_project_id_by_name(name=project) if project else None
1597
- cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
1598
-
1599
- with FileDownloadProgress() as progress:
1600
- task_id = progress.add_task(
1601
- description=f"Creating an upload request for '{dataset_file}'",
1602
- total=dataset_file_size,
1603
- )
1604
- _, project_id = source_cloud_id_and_project_id(
1605
- internal_api=self._internal_api_client,
1606
- external_api=self._external_api_client,
1607
- cloud_id=cloud_id,
1608
- project_id=project_id,
1609
- )
1610
- dataset_upload: DatasetUpload = self._internal_api_client.create_dataset_upload_api_v2_datasets_upload_post(
1611
- create_dataset=CreateDataset(
1612
- filename=os.path.basename(dataset_file),
1613
- description=description,
1614
- name=name,
1615
- project_id=project_id,
1616
- )
1617
- ).result
1618
-
1619
- progress.update(task_id, description=f"Uploading '{dataset_file}'")
1620
-
1621
- with open(dataset_file, "rb") as file_reader:
1622
- progress_reader = ProgressFileReader(file_reader, progress, task_id)
1623
- response = requests.put(
1624
- dataset_upload.upload_url, data=progress_reader,
1625
- )
1626
- response.raise_for_status()
1627
-
1628
- progress.update(task_id, completed=os.path.getsize(dataset_file))
1629
- progress.console.print(
1630
- "Upload complete!", style=Style(bold=True, color="green")
1631
- )
1632
- internal_dataset = dataset_upload.dataset
1633
- from anyscale.llm.dataset._private.models import Dataset
1634
-
1635
- dataset = Dataset.parse_from_internal_model(internal_dataset)
1636
- return dataset
1637
-
1638
- @handle_api_exceptions
1639
- def download_dataset(
1640
- self, name: str, version: Optional[int], project: Optional[str]
1641
- ) -> bytes:
1642
- project_id = self._source_project_id(project)
1643
- with FileDownloadProgress() as progress:
1644
- task_id = progress.add_task(
1645
- description=f"Getting download info for '{name}'",
1646
- )
1647
- download_url: str = self._internal_api_client.get_dataset_download_url_api_v2_datasets_download_get(
1648
- name, version=version, project_id=project_id,
1649
- )
1650
- progress.update(task_id, description=f"Downloading '{name}'")
1651
- response = requests.get(download_url, stream=True)
1652
- total_size = int(response.headers.get("content-length", 0))
1653
- progress.update(task_id, total=total_size)
1654
-
1655
- # For CLI, consider writing to disk instead of loading the entire file into memory.
1656
- dataset_bytes = b""
1657
- for data in response.iter_content(Bytes.MB):
1658
- dataset_bytes += data
1659
- progress.update(task_id, advance=len(data))
1660
-
1661
- progress.update(task_id, completed=total_size)
1662
- progress.console.print(
1663
- "Download complete!", style=Style(bold=True, color="green")
1664
- )
1665
-
1666
- return dataset_bytes
1667
-
1668
- @handle_api_exceptions
1669
- def list_datasets(
1670
- self,
1671
- limit: Optional[int] = None,
1672
- after: Optional[str] = None, # Unique ID to start listing after
1673
- name_contains: Optional[str] = None,
1674
- cloud: Optional[str] = None,
1675
- project: Optional[str] = None,
1676
- ):
1677
- project_id = self._source_project_id(project)
1678
- cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
1679
-
1680
- def get_next_page(
1681
- after_id: Optional[str],
1682
- ) -> InternalListResponse[InternalDataset]:
1683
- internal_datasets: InternalListResponse = self._internal_api_client.list_datasets_api_v2_datasets_get(
1684
- project_id=project_id,
1685
- cloud_id=cloud_id,
1686
- name_contains=name_contains,
1687
- after=after_id,
1688
- )
1689
- return internal_datasets
1690
-
1691
- from anyscale.llm.dataset._private.models import Dataset
1692
-
1693
- list_response = ListResponse(
1694
- after=after, limit=limit, get_next_page=get_next_page, cls=Dataset,
1695
- )
1696
- return list_response
1697
-
1698
- def _source_project_id(self, project_name: Optional[str]) -> Optional[str]:
1699
- """Sources a optional project ID from an optionally-provided project name."""
1700
- if project_name:
1701
- project_id = self._get_project_id_by_name(name=project_name)
1702
- else:
1703
- project_id = None
1704
- return project_id
1705
-
1706
- @handle_api_exceptions
1707
- def get_finetuned_model(
1708
- self, model_id: Optional[str], job_id: Optional[str]
1709
- ) -> FineTunedModel:
1710
- if model_id:
1711
- return self._internal_api_client.get_model_api_v2_llm_models_model_id_get(
1712
- model_id
1713
- ).result
1714
- elif job_id:
1715
- return self._internal_api_client.get_model_by_job_id_api_v2_llm_models_get_by_job_id_job_id_get(
1716
- job_id
1717
- ).result
1718
- else:
1719
- raise ValueError("Atleast one of `model_id` or `job_id` must be provided")
1720
-
1721
1562
  @handle_api_exceptions
1722
1563
  def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
1723
1564
  return self._internal_api_client.create_workspace_api_v2_experimental_workspaces_post(
@@ -1898,48 +1739,6 @@ class AnyscaleClient(AnyscaleClientInterface):
1898
1739
  else:
1899
1740
  return project.name
1900
1741
 
1901
- @handle_api_exceptions
1902
- def delete_finetuned_model(self, model_id: str) -> DeletedPlatformFineTunedModel:
1903
- deleted_model = self._internal_api_client.delete_model_api_v2_llm_models_model_id_delete(
1904
- model_id
1905
- ).result
1906
- return deleted_model
1907
-
1908
- @handle_api_exceptions
1909
- def list_finetuned_models(
1910
- self, cloud_id: Optional[str], project_id: Optional[str], max_items: int,
1911
- ) -> List[FineTunedModel]:
1912
- if self.inside_workspace():
1913
- # Resolve `cloud_id` and `project_id`. If not provided and if this is being run in a workspace,
1914
- # we use the `cloud_id` and `project_id` of the workspace
1915
- cloud_id, project_id = source_cloud_id_and_project_id(
1916
- internal_api=self._internal_api_client,
1917
- external_api=self._external_api_client,
1918
- cloud_id=cloud_id,
1919
- project_id=project_id,
1920
- )
1921
-
1922
- paging_token = None
1923
- results = []
1924
- while True:
1925
- count = min(self.LIST_ENDPOINT_COUNT, max_items)
1926
- resp: FinetunedmodelListResponse = self._internal_api_client.list_models_api_v2_llm_models_get(
1927
- cloud_id=cloud_id,
1928
- project_id=project_id,
1929
- paging_token=paging_token,
1930
- count=count,
1931
- )
1932
- models = resp.results
1933
- results.extend(models)
1934
- if not len(models) or not resp.metadata.next_paging_token:
1935
- break
1936
-
1937
- if max_items and len(results) >= max_items:
1938
- break
1939
- paging_token = resp.metadata.next_paging_token
1940
-
1941
- return results[:max_items] if max_items else results
1942
-
1943
1742
  @handle_api_exceptions
1944
1743
  def download_aggregated_instance_usage_csv(
1945
1744
  self,
@@ -20,8 +20,6 @@ from anyscale.client.openapi_client.models import (
20
20
  DecoratedjobqueueListResponse,
21
21
  DecoratedlistserviceapimodelListResponse,
22
22
  DecoratedProductionServiceV2APIModel,
23
- DeletedPlatformFineTunedModel,
24
- FineTunedModel,
25
23
  InternalProductionJob,
26
24
  JobQueueSortDirective,
27
25
  OrganizationCollaborator,
@@ -575,41 +573,6 @@ class AnyscaleClientInterface(ABC):
575
573
  """
576
574
  raise NotImplementedError
577
575
 
578
- @abstractmethod
579
- def get_dataset(self, name: str, version: Optional[int], project: Optional[str]):
580
- """See docstring for `anyscale.llm.dataset.get()`."""
581
- raise NotImplementedError
582
-
583
- @abstractmethod
584
- def upload_dataset(
585
- self,
586
- dataset_file: str,
587
- name: Optional[str],
588
- description: Optional[str],
589
- cloud: Optional[str],
590
- project: Optional[str],
591
- ):
592
- """See docstring for `anyscale.llm.dataset.upload()`."""
593
- raise NotImplementedError
594
-
595
- @abstractmethod
596
- def download_dataset(
597
- self, name: str, version: Optional[int], project: Optional[str]
598
- ) -> bytes:
599
- """See docstring for `anyscale.llm.dataset.download()`."""
600
-
601
- @abstractmethod
602
- def list_datasets(
603
- self,
604
- limit: Optional[int] = None,
605
- after: Optional[str] = None, # Unique ID to start listing after
606
- name_contains: Optional[str] = None,
607
- cloud: Optional[str] = None,
608
- project: Optional[str] = None,
609
- ):
610
- """See docstring for `anyscale.llm.dataset.list()`."""
611
- raise NotImplementedError
612
-
613
576
  @abstractmethod
614
577
  def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
615
578
  """Creates a workspace
@@ -657,13 +620,6 @@ class AnyscaleClientInterface(ABC):
657
620
  """Updates the dynamic dependencies of a workspace"""
658
621
  raise NotImplementedError
659
622
 
660
- @abstractmethod
661
- def get_finetuned_model(
662
- self, model_id: Optional[str], job_id: Optional[str]
663
- ) -> FineTunedModel: # noqa: A002
664
- """Returns LLM model information for the given model ID"""
665
- raise NotImplementedError
666
-
667
623
  @abstractmethod
668
624
  def get_workspace_cluster(
669
625
  self, workspace_id: Optional[str]
@@ -693,17 +649,6 @@ class AnyscaleClientInterface(ABC):
693
649
  """Get the default directory name for a workspace."""
694
650
  raise NotImplementedError
695
651
 
696
- @abstractmethod
697
- def delete_finetuned_model(self, model_id: str) -> DeletedPlatformFineTunedModel:
698
- """Deletes a finetuned model from the model registry given the model ID"""
699
- raise NotImplementedError
700
-
701
- @abstractmethod
702
- def list_finetuned_models(
703
- self, cloud_id: Optional[str], project_id: Optional[str], max_items: int,
704
- ) -> List[FineTunedModel]:
705
- raise NotImplementedError
706
-
707
652
  @abstractmethod
708
653
  def update_workspace(
709
654
  self,
@@ -11,7 +11,6 @@ from anyscale._private.anyscale_client.common import (
11
11
  WORKSPACE_CLUSTER_NAME_PREFIX,
12
12
  )
13
13
  from anyscale._private.models.image_uri import ImageURI
14
- from anyscale._private.models.model_base import ListResponse
15
14
  from anyscale.cli_logger import BlockLogger
16
15
  from anyscale.client.openapi_client.models import (
17
16
  AdminCreatedUser,
@@ -58,7 +57,6 @@ from anyscale.client.openapi_client.models.decorated_schedule import DecoratedSc
58
57
  from anyscale.client.openapi_client.models.decorated_session import DecoratedSession
59
58
  from anyscale.client.openapi_client.models.session_ssh_key import SessionSshKey
60
59
  from anyscale.cluster_compute import parse_cluster_compute_name_version
61
- from anyscale.llm.dataset._private.models import Dataset
62
60
  from anyscale.sdk.anyscale_client.configuration import Configuration
63
61
  from anyscale.sdk.anyscale_client.models import (
64
62
  ApplyProductionServiceV2Model,
@@ -105,6 +103,7 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
105
103
  WORKSPACE_CLUSTER_ID = "fake-workspace-cluster-id"
106
104
  WORKSPACE_PROJECT_ID = "fake-workspace-project-id"
107
105
  WORKSPACE_CLUSTER_COMPUTE_ID = "fake-workspace-cluster-compute-id"
106
+ WORKSPACE_CLUSTER_COMPUTE_NAME = "fake-workspace-cluster-compute"
108
107
  WORKSPACE_CLUSTER_ENV_BUILD_ID = "fake-workspace-cluster-env-build-id"
109
108
 
110
109
  SCHEDULE_NEXT_TRIGGER_AT_TIME = datetime.utcnow()
@@ -210,10 +209,28 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
210
209
  ),
211
210
  local_vars_configuration=OPENAPI_NO_VALIDATION,
212
211
  )
212
+
213
+ workspace_compute_config = ClusterCompute(
214
+ id=self.WORKSPACE_CLUSTER_COMPUTE_ID,
215
+ name=self.WORKSPACE_CLUSTER_COMPUTE_NAME,
216
+ config=ClusterComputeConfig(
217
+ cloud_id=self.WORKSPACE_CLOUD_ID,
218
+ head_node_type=ComputeNodeType(
219
+ name="default-head-node",
220
+ instance_type="m5.2xlarge",
221
+ resources={"CPU": 8, "GPU": 1},
222
+ ),
223
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
224
+ ),
225
+ local_vars_configuration=OPENAPI_NO_VALIDATION,
226
+ )
227
+
213
228
  self._default_compute_configs: Dict[str, ClusterCompute] = {
214
229
  self.DEFAULT_CLOUD_ID: compute_config,
230
+ self.WORKSPACE_CLOUD_ID: workspace_compute_config,
215
231
  }
216
232
  self.add_compute_config(compute_config)
233
+ self.add_compute_config(workspace_compute_config)
217
234
 
218
235
  def get_job_ui_url(self, job_id: str) -> str:
219
236
  return f"{self.BASE_UI_URL}/jobs/{job_id}"
@@ -993,50 +1010,6 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
993
1010
  def trigger_counts(self, id: str): # noqa: A002
994
1011
  return self._schedule_trigger_counts[id]
995
1012
 
996
- def get_dataset(
997
- self, name: str, version: Optional[int], project: Optional[str] # noqa: ARG002
998
- ) -> Dataset:
999
- raise NotImplementedError
1000
-
1001
- def upload_dataset(
1002
- self,
1003
- dataset_file: str, # noqa: ARG002
1004
- name: Optional[str], # noqa: ARG002
1005
- description: Optional[str], # noqa: ARG002
1006
- cloud: Optional[str], # noqa: ARG002
1007
- project: Optional[str], # noqa: ARG002
1008
- ) -> Dataset:
1009
- raise NotImplementedError
1010
-
1011
- def download_dataset(
1012
- self, name: str, version: Optional[int], project: Optional[str] # noqa: ARG002
1013
- ) -> bytes:
1014
- return b""
1015
-
1016
- def list_datasets(
1017
- self,
1018
- limit: Optional[int] = None, # noqa: ARG002
1019
- after: Optional[str] = None, # noqa: ARG002
1020
- name_contains: Optional[str] = None, # noqa: ARG002
1021
- cloud: Optional[str] = None, # noqa: ARG002
1022
- project: Optional[str] = None, # noqa: ARG002
1023
- ) -> ListResponse[Dataset]:
1024
- raise NotImplementedError
1025
-
1026
- def get_finetuned_model(
1027
- self, model_id: Optional[str], job_id: Optional[str] # noqa: ARG002
1028
- ) -> FineTunedModel:
1029
- return FineTunedModel(
1030
- id=model_id if model_id else "test-model-id",
1031
- model_id=model_id if model_id else "test-model-id",
1032
- base_model_id="my_base_model_id",
1033
- ft_type=FineTuneType.LORA,
1034
- creator_id="",
1035
- creator_email="",
1036
- created_at=datetime.utcnow(),
1037
- storage_uri="s3://fake_bucket/fake_folder/",
1038
- )
1039
-
1040
1013
  def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
1041
1014
  workspace_id = uuid.uuid4()
1042
1015