anyscale 0.26.46__py3-none-any.whl → 0.26.48__py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
- anyscale/__init__.py +0 -7
- anyscale/_private/anyscale_client/anyscale_client.py +1 -208
- anyscale/_private/anyscale_client/common.py +0 -55
- anyscale/_private/anyscale_client/fake_anyscale_client.py +19 -46
- anyscale/_private/docgen/__main__.py +24 -45
- anyscale/_private/docgen/generator.py +32 -16
- anyscale/_private/docgen/generator_legacy.py +58 -6
- anyscale/_private/docgen/models.md +3 -2
- anyscale/_private/workload/workload_config.py +16 -8
- anyscale/_private/workload/workload_sdk.py +22 -5
- anyscale/client/README.md +4 -1
- anyscale/client/openapi_client/__init__.py +2 -1
- anyscale/client/openapi_client/api/default_api.py +253 -4
- anyscale/client/openapi_client/models/__init__.py +2 -1
- anyscale/client/openapi_client/models/{alert_type.py → alert_issue_type.py} +8 -20
- anyscale/client/openapi_client/models/baseimagesenum.py +1 -2
- anyscale/client/openapi_client/models/cloud.py +31 -3
- anyscale/client/openapi_client/models/cloud_deployment.py +30 -3
- anyscale/client/openapi_client/models/cloud_with_cloud_resource.py +29 -1
- anyscale/client/openapi_client/models/cloud_with_cloud_resource_gcp.py +29 -1
- anyscale/client/openapi_client/models/dataset_metrics.py +6 -6
- anyscale/client/openapi_client/models/dataset_state.py +2 -1
- anyscale/client/openapi_client/models/describe_system_workload_response.py +32 -6
- anyscale/client/openapi_client/models/experimental_workspace.py +29 -1
- anyscale/client/openapi_client/models/experimental_workspaces_sort_field.py +2 -1
- anyscale/client/openapi_client/models/operator_metrics.py +8 -9
- anyscale/client/openapi_client/models/operator_status.py +102 -0
- anyscale/client/openapi_client/models/organization_usage_alert.py +20 -20
- anyscale/client/openapi_client/models/supportedbaseimagesenum.py +1 -2
- anyscale/cloud/models.py +330 -0
- anyscale/commands/cloud_commands.py +132 -43
- anyscale/commands/command_examples.py +54 -134
- anyscale/commands/compute_config_commands.py +7 -11
- anyscale/compute_config/__init__.py +2 -16
- anyscale/compute_config/_private/compute_config_sdk.py +27 -17
- anyscale/compute_config/commands.py +14 -44
- anyscale/compute_config/models.py +49 -26
- anyscale/controllers/cloud_controller.py +289 -171
- anyscale/controllers/cloud_file_storage_utils.py +204 -0
- anyscale/controllers/kubernetes_verifier.py +1567 -0
- anyscale/job/_private/job_sdk.py +17 -8
- anyscale/job/models.py +1 -1
- anyscale/scripts.py +0 -2
- anyscale/sdk/anyscale_client/models/baseimagesenum.py +1 -2
- anyscale/sdk/anyscale_client/models/cloud.py +31 -3
- anyscale/sdk/anyscale_client/models/supportedbaseimagesenum.py +1 -2
- anyscale/shared_anyscale_utils/utils/id_gen.py +1 -0
- anyscale/version.py +1 -1
- anyscale/workspace/models.py +14 -7
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/METADATA +1 -1
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/RECORD +56 -70
- anyscale/commands/llm/dataset_commands.py +0 -269
- anyscale/commands/llm/group.py +0 -15
- anyscale/commands/llm/models_commands.py +0 -123
- anyscale/controllers/llm/__init__.py +0 -0
- anyscale/controllers/llm/models_controller.py +0 -144
- anyscale/llm/__init__.py +0 -2
- anyscale/llm/dataset/__init__.py +0 -2
- anyscale/llm/dataset/_private/__init__.py +0 -0
- anyscale/llm/dataset/_private/docs.py +0 -63
- anyscale/llm/dataset/_private/models.py +0 -71
- anyscale/llm/dataset/_private/sdk.py +0 -147
- anyscale/llm/model/__init__.py +0 -2
- anyscale/llm/model/_private/models_sdk.py +0 -62
- anyscale/llm/model/commands.py +0 -93
- anyscale/llm/model/models.py +0 -171
- anyscale/llm/model/sdk.py +0 -62
- anyscale/llm/sdk.py +0 -27
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/WHEEL +0 -0
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/entry_points.txt +0 -0
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/licenses/LICENSE +0 -0
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/licenses/NOTICE +0 -0
- {anyscale-0.26.46.dist-info → anyscale-0.26.48.dist-info}/top_level.txt +0 -0
anyscale/__init__.py
CHANGED
@@ -26,7 +26,6 @@ from anyscale import (
     integrations,
     job,
     job_queue,
-    llm,
     organization_invitation,
     project,
     resource_quota,
@@ -48,7 +47,6 @@ from anyscale.connect import ClientBuilder
 from anyscale.image import ImageSDK
 from anyscale.job import JobSDK
 from anyscale.job_queue import JobQueueSDK
-from anyscale.llm import LLMSDK
 from anyscale.organization_invitation import OrganizationInvitationSDK
 from anyscale.project import ProjectSDK
 from anyscale.resource_quota import ResourceQuotaSDK
@@ -127,7 +125,6 @@ class Anyscale:
         self._cloud_sdk = CloudSDK(client=self._anyscale_client)
         self._schedule_sdk = ScheduleSDK(client=self._anyscale_client)
         self._image_sdk = ImageSDK(client=self._anyscale_client)
-        self._llm_sdk = LLMSDK(client=self._anyscale_client)
         self._organization_invitation_sdk = OrganizationInvitationSDK(
             client=self._anyscale_client
         )
@@ -217,10 +214,6 @@ class Anyscale:
     def image(self) -> ImageSDK:  # noqa: F811
         return self._image_sdk
 
-    @property
-    def llm(self) -> LLMSDK:  # noqa: F811
-        return self._llm_sdk
-
     @property
     def organization_invitation(self) -> OrganizationInvitationSDK:  # noqa: F811
         return self._organization_invitation_sdk
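Taken together, these hunks remove the `llm` accessor and its `LLMSDK` wiring from the top-level `Anyscale` client. A minimal, hypothetical compatibility check for downstream code (not part of the SDK itself; since `llm` was a class-level property, no authentication or instantiation is needed):

```python
from anyscale import Anyscale

# In 0.26.46 `llm` is a property defined on the Anyscale class; in 0.26.48 it is gone,
# so a class-level hasattr check distinguishes the two surfaces.
if hasattr(Anyscale, "llm"):
    print("0.26.46-style surface: Anyscale().llm (LLMSDK) is available")
else:
    print("0.26.48-style surface: the llm entry point has been removed")
```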
anyscale/_private/anyscale_client/anyscale_client.py
CHANGED
@@ -23,12 +23,7 @@ from anyscale._private.anyscale_client.common import (
     RUNTIME_ENV_PACKAGE_FORMAT,
 )
 from anyscale._private.models.image_uri import ImageURI
-from anyscale._private.
-from anyscale._private.utils.progress_util import (
-    FileDownloadProgress,
-    ProgressFileReader,
-)
-from anyscale.api_utils.common_utils import source_cloud_id_and_project_id
+from anyscale._private.utils.progress_util import FileDownloadProgress
 from anyscale.api_utils.logs_util import _download_log_from_s3_url_sync
 from anyscale.authenticate import AuthenticationBlock, get_auth_api_client
 from anyscale.cli_logger import BlockLogger
@@ -53,23 +48,17 @@ from anyscale.client.openapi_client.models import (
     ComputeTemplateQuery,
     CreateCloudCollaborator,
     CreateComputeTemplate,
-    CreateDataset,
     CreateExperimentalWorkspace,
     CreateInternalProductionJob,
     CreateOrganizationInvitation,
     CreateResourceQuota,
     CreateUserProjectCollaborator,
-    Dataset as InternalDataset,
-    DatasetUpload,
     DecoratedComputeTemplate,
     DecoratedjobqueueListResponse,
     DecoratedlistserviceapimodelListResponse,
     DecoratedProductionServiceV2APIModel,
     DecoratedSession,
-    DeletedPlatformFineTunedModel,
     ExperimentalWorkspace,
-    FineTunedModel,
-    FinetunedmodelListResponse,
     GetOrCreateBuildFromImageUriRequest,
     InternalProductionJob,
     JobQueueSortDirective,
@@ -1564,160 +1553,6 @@ class AnyscaleClient(AnyscaleClientInterface):
             id
         )
 
-    @handle_api_exceptions
-    def get_dataset(self, name: str, version: Optional[int], project: Optional[str]):
-        project_id = self._source_project_id(project)
-        internal_dataset = self._internal_api_client.find_dataset_api_v2_datasets_find_get(
-            name=name, version=version, project_id=project_id
-        ).result
-        from anyscale.llm.dataset._private.models import Dataset
-
-        dataset = Dataset.parse_from_internal_model(internal_dataset)
-        return dataset
-
-    def upload_dataset(
-        self,
-        dataset_file: str,
-        name: Optional[str],
-        description: Optional[str],
-        cloud: Optional[str],
-        project: Optional[str],
-    ):
-        # Resolve `~/.../file` to `/home/user/.../file`
-        dataset_file = os.path.expanduser(dataset_file)
-
-        if not os.path.isfile(dataset_file):
-            raise ValueError(f"Path '{dataset_file}' is not a valid file.")
-        dataset_file_size = os.path.getsize(dataset_file)
-        if dataset_file_size > 5 * Bytes.GB:
-            raise ValueError(
-                f"File '{dataset_file}' is too large to upload. The maximum size is 5 GB."
-            )
-        project_id = self._get_project_id_by_name(name=project) if project else None
-        cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
-
-        with FileDownloadProgress() as progress:
-            task_id = progress.add_task(
-                description=f"Creating an upload request for '{dataset_file}'",
-                total=dataset_file_size,
-            )
-            _, project_id = source_cloud_id_and_project_id(
-                internal_api=self._internal_api_client,
-                external_api=self._external_api_client,
-                cloud_id=cloud_id,
-                project_id=project_id,
-            )
-            dataset_upload: DatasetUpload = self._internal_api_client.create_dataset_upload_api_v2_datasets_upload_post(
-                create_dataset=CreateDataset(
-                    filename=os.path.basename(dataset_file),
-                    description=description,
-                    name=name,
-                    project_id=project_id,
-                )
-            ).result
-
-            progress.update(task_id, description=f"Uploading '{dataset_file}'")
-
-            with open(dataset_file, "rb") as file_reader:
-                progress_reader = ProgressFileReader(file_reader, progress, task_id)
-                response = requests.put(
-                    dataset_upload.upload_url, data=progress_reader,
-                )
-                response.raise_for_status()
-
-            progress.update(task_id, completed=os.path.getsize(dataset_file))
-            progress.console.print(
-                "Upload complete!", style=Style(bold=True, color="green")
-            )
-        internal_dataset = dataset_upload.dataset
-        from anyscale.llm.dataset._private.models import Dataset
-
-        dataset = Dataset.parse_from_internal_model(internal_dataset)
-        return dataset
-
-    @handle_api_exceptions
-    def download_dataset(
-        self, name: str, version: Optional[int], project: Optional[str]
-    ) -> bytes:
-        project_id = self._source_project_id(project)
-        with FileDownloadProgress() as progress:
-            task_id = progress.add_task(
-                description=f"Getting download info for '{name}'",
-            )
-            download_url: str = self._internal_api_client.get_dataset_download_url_api_v2_datasets_download_get(
-                name, version=version, project_id=project_id,
-            )
-            progress.update(task_id, description=f"Downloading '{name}'")
-            response = requests.get(download_url, stream=True)
-            total_size = int(response.headers.get("content-length", 0))
-            progress.update(task_id, total=total_size)
-
-            # For CLI, consider writing to disk instead of loading the entire file into memory.
-            dataset_bytes = b""
-            for data in response.iter_content(Bytes.MB):
-                dataset_bytes += data
-                progress.update(task_id, advance=len(data))
-
-            progress.update(task_id, completed=total_size)
-            progress.console.print(
-                "Download complete!", style=Style(bold=True, color="green")
-            )
-
-        return dataset_bytes
-
-    @handle_api_exceptions
-    def list_datasets(
-        self,
-        limit: Optional[int] = None,
-        after: Optional[str] = None,  # Unique ID to start listing after
-        name_contains: Optional[str] = None,
-        cloud: Optional[str] = None,
-        project: Optional[str] = None,
-    ):
-        project_id = self._source_project_id(project)
-        cloud_id = self.get_cloud_id(cloud_name=cloud) if cloud else None
-
-        def get_next_page(
-            after_id: Optional[str],
-        ) -> InternalListResponse[InternalDataset]:
-            internal_datasets: InternalListResponse = self._internal_api_client.list_datasets_api_v2_datasets_get(
-                project_id=project_id,
-                cloud_id=cloud_id,
-                name_contains=name_contains,
-                after=after_id,
-            )
-            return internal_datasets
-
-        from anyscale.llm.dataset._private.models import Dataset
-
-        list_response = ListResponse(
-            after=after, limit=limit, get_next_page=get_next_page, cls=Dataset,
-        )
-        return list_response
-
-    def _source_project_id(self, project_name: Optional[str]) -> Optional[str]:
-        """Sources a optional project ID from an optionally-provided project name."""
-        if project_name:
-            project_id = self._get_project_id_by_name(name=project_name)
-        else:
-            project_id = None
-        return project_id
-
-    @handle_api_exceptions
-    def get_finetuned_model(
-        self, model_id: Optional[str], job_id: Optional[str]
-    ) -> FineTunedModel:
-        if model_id:
-            return self._internal_api_client.get_model_api_v2_llm_models_model_id_get(
-                model_id
-            ).result
-        elif job_id:
-            return self._internal_api_client.get_model_by_job_id_api_v2_llm_models_get_by_job_id_job_id_get(
-                job_id
-            ).result
-        else:
-            raise ValueError("Atleast one of `model_id` or `job_id` must be provided")
-
     @handle_api_exceptions
     def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
         return self._internal_api_client.create_workspace_api_v2_experimental_workspaces_post(
@@ -1898,48 +1733,6 @@ class AnyscaleClient(AnyscaleClientInterface):
         else:
             return project.name
 
-    @handle_api_exceptions
-    def delete_finetuned_model(self, model_id: str) -> DeletedPlatformFineTunedModel:
-        deleted_model = self._internal_api_client.delete_model_api_v2_llm_models_model_id_delete(
-            model_id
-        ).result
-        return deleted_model
-
-    @handle_api_exceptions
-    def list_finetuned_models(
-        self, cloud_id: Optional[str], project_id: Optional[str], max_items: int,
-    ) -> List[FineTunedModel]:
-        if self.inside_workspace():
-            # Resolve `cloud_id` and `project_id`. If not provided and if this is being run in a workspace,
-            # we use the `cloud_id` and `project_id` of the workspace
-            cloud_id, project_id = source_cloud_id_and_project_id(
-                internal_api=self._internal_api_client,
-                external_api=self._external_api_client,
-                cloud_id=cloud_id,
-                project_id=project_id,
-            )
-
-        paging_token = None
-        results = []
-        while True:
-            count = min(self.LIST_ENDPOINT_COUNT, max_items)
-            resp: FinetunedmodelListResponse = self._internal_api_client.list_models_api_v2_llm_models_get(
-                cloud_id=cloud_id,
-                project_id=project_id,
-                paging_token=paging_token,
-                count=count,
-            )
-            models = resp.results
-            results.extend(models)
-            if not len(models) or not resp.metadata.next_paging_token:
-                break
-
-            if max_items and len(results) >= max_items:
-                break
-            paging_token = resp.metadata.next_paging_token
-
-        return results[:max_items] if max_items else results
-
     @handle_api_exceptions
     def download_aggregated_instance_usage_csv(
         self,
anyscale/_private/anyscale_client/common.py
CHANGED
@@ -20,8 +20,6 @@ from anyscale.client.openapi_client.models import (
     DecoratedjobqueueListResponse,
     DecoratedlistserviceapimodelListResponse,
     DecoratedProductionServiceV2APIModel,
-    DeletedPlatformFineTunedModel,
-    FineTunedModel,
     InternalProductionJob,
     JobQueueSortDirective,
     OrganizationCollaborator,
@@ -575,41 +573,6 @@ class AnyscaleClientInterface(ABC):
         """
         raise NotImplementedError
 
-    @abstractmethod
-    def get_dataset(self, name: str, version: Optional[int], project: Optional[str]):
-        """See docstring for `anyscale.llm.dataset.get()`."""
-        raise NotImplementedError
-
-    @abstractmethod
-    def upload_dataset(
-        self,
-        dataset_file: str,
-        name: Optional[str],
-        description: Optional[str],
-        cloud: Optional[str],
-        project: Optional[str],
-    ):
-        """See docstring for `anyscale.llm.dataset.upload()`."""
-        raise NotImplementedError
-
-    @abstractmethod
-    def download_dataset(
-        self, name: str, version: Optional[int], project: Optional[str]
-    ) -> bytes:
-        """See docstring for `anyscale.llm.dataset.download()`."""
-
-    @abstractmethod
-    def list_datasets(
-        self,
-        limit: Optional[int] = None,
-        after: Optional[str] = None,  # Unique ID to start listing after
-        name_contains: Optional[str] = None,
-        cloud: Optional[str] = None,
-        project: Optional[str] = None,
-    ):
-        """See docstring for `anyscale.llm.dataset.list()`."""
-        raise NotImplementedError
-
     @abstractmethod
     def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
         """Creates a workspace
@@ -657,13 +620,6 @@ class AnyscaleClientInterface(ABC):
         """Updates the dynamic dependencies of a workspace"""
         raise NotImplementedError
 
-    @abstractmethod
-    def get_finetuned_model(
-        self, model_id: Optional[str], job_id: Optional[str]
-    ) -> FineTunedModel:  # noqa: A002
-        """Returns LLM model information for the given model ID"""
-        raise NotImplementedError
-
     @abstractmethod
     def get_workspace_cluster(
         self, workspace_id: Optional[str]
@@ -693,17 +649,6 @@ class AnyscaleClientInterface(ABC):
         """Get the default directory name for a workspace."""
         raise NotImplementedError
 
-    @abstractmethod
-    def delete_finetuned_model(self, model_id: str) -> DeletedPlatformFineTunedModel:
-        """Deletes a finetuned model from the model registry given the model ID"""
-        raise NotImplementedError
-
-    @abstractmethod
-    def list_finetuned_models(
-        self, cloud_id: Optional[str], project_id: Optional[str], max_items: int,
-    ) -> List[FineTunedModel]:
-        raise NotImplementedError
-
     @abstractmethod
     def update_workspace(
         self,
anyscale/_private/anyscale_client/fake_anyscale_client.py
CHANGED
@@ -11,7 +11,6 @@ from anyscale._private.anyscale_client.common import (
     WORKSPACE_CLUSTER_NAME_PREFIX,
 )
 from anyscale._private.models.image_uri import ImageURI
-from anyscale._private.models.model_base import ListResponse
 from anyscale.cli_logger import BlockLogger
 from anyscale.client.openapi_client.models import (
     AdminCreatedUser,
@@ -58,7 +57,6 @@ from anyscale.client.openapi_client.models.decorated_schedule import DecoratedSc
 from anyscale.client.openapi_client.models.decorated_session import DecoratedSession
 from anyscale.client.openapi_client.models.session_ssh_key import SessionSshKey
 from anyscale.cluster_compute import parse_cluster_compute_name_version
-from anyscale.llm.dataset._private.models import Dataset
 from anyscale.sdk.anyscale_client.configuration import Configuration
 from anyscale.sdk.anyscale_client.models import (
     ApplyProductionServiceV2Model,
@@ -105,6 +103,7 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
     WORKSPACE_CLUSTER_ID = "fake-workspace-cluster-id"
     WORKSPACE_PROJECT_ID = "fake-workspace-project-id"
     WORKSPACE_CLUSTER_COMPUTE_ID = "fake-workspace-cluster-compute-id"
+    WORKSPACE_CLUSTER_COMPUTE_NAME = "fake-workspace-cluster-compute"
    WORKSPACE_CLUSTER_ENV_BUILD_ID = "fake-workspace-cluster-env-build-id"
 
     SCHEDULE_NEXT_TRIGGER_AT_TIME = datetime.utcnow()
@@ -210,10 +209,28 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
             ),
             local_vars_configuration=OPENAPI_NO_VALIDATION,
         )
+
+        workspace_compute_config = ClusterCompute(
+            id=self.WORKSPACE_CLUSTER_COMPUTE_ID,
+            name=self.WORKSPACE_CLUSTER_COMPUTE_NAME,
+            config=ClusterComputeConfig(
+                cloud_id=self.WORKSPACE_CLOUD_ID,
+                head_node_type=ComputeNodeType(
+                    name="default-head-node",
+                    instance_type="m5.2xlarge",
+                    resources={"CPU": 8, "GPU": 1},
+                ),
+                local_vars_configuration=OPENAPI_NO_VALIDATION,
+            ),
+            local_vars_configuration=OPENAPI_NO_VALIDATION,
+        )
+
         self._default_compute_configs: Dict[str, ClusterCompute] = {
             self.DEFAULT_CLOUD_ID: compute_config,
+            self.WORKSPACE_CLOUD_ID: workspace_compute_config,
         }
         self.add_compute_config(compute_config)
+        self.add_compute_config(workspace_compute_config)
 
     def get_job_ui_url(self, job_id: str) -> str:
         return f"{self.BASE_UI_URL}/jobs/{job_id}"
@@ -993,50 +1010,6 @@ class FakeAnyscaleClient(AnyscaleClientInterface):
     def trigger_counts(self, id: str):  # noqa: A002
         return self._schedule_trigger_counts[id]
 
-    def get_dataset(
-        self, name: str, version: Optional[int], project: Optional[str]  # noqa: ARG002
-    ) -> Dataset:
-        raise NotImplementedError
-
-    def upload_dataset(
-        self,
-        dataset_file: str,  # noqa: ARG002
-        name: Optional[str],  # noqa: ARG002
-        description: Optional[str],  # noqa: ARG002
-        cloud: Optional[str],  # noqa: ARG002
-        project: Optional[str],  # noqa: ARG002
-    ) -> Dataset:
-        raise NotImplementedError
-
-    def download_dataset(
-        self, name: str, version: Optional[int], project: Optional[str]  # noqa: ARG002
-    ) -> bytes:
-        return b""
-
-    def list_datasets(
-        self,
-        limit: Optional[int] = None,  # noqa: ARG002
-        after: Optional[str] = None,  # noqa: ARG002
-        name_contains: Optional[str] = None,  # noqa: ARG002
-        cloud: Optional[str] = None,  # noqa: ARG002
-        project: Optional[str] = None,  # noqa: ARG002
-    ) -> ListResponse[Dataset]:
-        raise NotImplementedError
-
-    def get_finetuned_model(
-        self, model_id: Optional[str], job_id: Optional[str]  # noqa: ARG002
-    ) -> FineTunedModel:
-        return FineTunedModel(
-            id=model_id if model_id else "test-model-id",
-            model_id=model_id if model_id else "test-model-id",
-            base_model_id="my_base_model_id",
-            ft_type=FineTuneType.LORA,
-            creator_id="",
-            creator_email="",
-            created_at=datetime.utcnow(),
-            storage_uri="s3://fake_bucket/fake_folder/",
-        )
-
     def create_workspace(self, model: CreateExperimentalWorkspace) -> str:
         workspace_id = uuid.uuid4()
 
anyscale/_private/docgen/__main__.py
CHANGED
@@ -11,11 +11,19 @@ from anyscale import scripts
 from anyscale._private.docgen.generator import MarkdownGenerator, Module
 from anyscale.aggregated_instance_usage.models import DownloadCSVFilters
 from anyscale.cloud.models import (
+    AWSConfig,
     Cloud,
+    CloudDeployment,
     CloudPermissionLevel,
     CloudProvider,
     ComputeStack,
     CreateCloudCollaborator,
+    FileStorage,
+    GCPConfig,
+    KubernetesConfig,
+    NetworkingMode,
+    NFSMountTarget,
+    ObjectStorage,
 )
 from anyscale.commands import (
     aggregated_instance_usage_commands,
@@ -39,9 +47,8 @@ from anyscale.commands import (
     workspace_commands,
     workspace_commands_v2,
 )
-from anyscale.commands.llm import dataset_commands, models_commands
 from anyscale.compute_config.models import (
-    CloudDeployment,
+    CloudDeployment as CloudDeploymentSelector,
     ComputeConfig,
     ComputeConfigVersion,
     HeadNodeConfig,
@@ -61,12 +68,6 @@ from anyscale.job.models import (
     JobState,
     JobStatus,
 )
-from anyscale.llm.dataset._private.models import Dataset
-from anyscale.llm.model.models import (
-    DeletedFineTunedModel,
-    FineTunedModel,
-    FineTuningType,
-)
 from anyscale.organization_invitation.models import OrganizationInvitation
 from anyscale.project.models import CreateProjectCollaborator, ProjectPermissionLevel
 from anyscale.resource_quota.models import CreateResourceQuota, Quota, ResourceQuota
@@ -315,10 +316,11 @@ ALL_MODULES = [
         ],
         models=[
             ComputeConfig,
+            MultiDeploymentComputeConfig,
             HeadNodeConfig,
             WorkerNodeGroupConfig,
             MarketType,
-
+            CloudDeploymentSelector,
             MultiDeploymentComputeConfig,
             ComputeConfigVersion,
         ],
@@ -433,7 +435,10 @@ ALL_MODULES = [
             cloud_commands.cloud_delete,
             cloud_commands.cloud_verify,
             cloud_commands.list_cloud,
-            cloud_commands.
+            cloud_commands.cloud_deployment_create,
+            cloud_commands.cloud_deployment_get,
+            cloud_commands.cloud_deployment_update,
+            cloud_commands.cloud_deployment_delete,
             cloud_commands.cloud_config_update,
             cloud_commands.cloud_set_default,
             cloud_commands.add_collaborators,
@@ -449,11 +454,19 @@ ALL_MODULES = [
             anyscale.cloud.terminate_system_cluster,
         ],
         models=[
+            Cloud,
             CloudPermissionLevel,
             CreateCloudCollaborator,
-
+            CloudDeployment,
             ComputeStack,
             CloudProvider,
+            NetworkingMode,
+            ObjectStorage,
+            FileStorage,
+            NFSMountTarget,
+            AWSConfig,
+            GCPConfig,
+            KubernetesConfig,
         ],
         cli_command_group_prefix={cloud_commands.cloud_config_update: "config"},
         legacy_sdk_commands={
@@ -582,40 +595,6 @@ ALL_MODULES = [
             "UpdateCluster",
         ],
     ),
-    Module(
-        title="LLM Model",
-        filename="llm_models.md",
-        cli_prefix="anyscale llm model",
-        cli_commands=[
-            models_commands.get_model,
-            models_commands.list_models,
-            models_commands.delete_model,
-        ],
-        sdk_prefix="anyscale.llm.model",
-        sdk_commands=[
-            anyscale.llm.model.get,
-            anyscale.llm.model.list,
-            anyscale.llm.model.delete,
-        ],
-        models=[FineTunedModel, FineTuningType, DeletedFineTunedModel],
-    ),
-    Module(
-        title="LLM Dataset",
-        filename="llm_dataset.md",
-        cli_prefix="anyscale llm dataset",
-        cli_commands=[
-            dataset_commands.get_dataset,
-            dataset_commands.upload_dataset,
-            dataset_commands.download_dataset,
-        ],
-        sdk_prefix="anyscale.llm.dataset",
-        sdk_commands=[
-            anyscale.llm.dataset.get,
-            anyscale.llm.dataset.upload,
-            anyscale.llm.dataset.download,
-        ],
-        models=[Dataset],
-    ),
     Module(
         title="Resource quotas",
         filename="resource-quotas.md",
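Note the name collision this file now handles: `anyscale.cloud.models` gains its own `CloudDeployment` while the existing `anyscale.compute_config.models.CloudDeployment` is kept under an alias. Assuming anyscale 0.26.48 is installed, the disambiguation looks like this:

```python
# Two distinct models share the name CloudDeployment in 0.26.48; the docgen
# entry point keeps both by aliasing the compute-config variant.
from anyscale.cloud.models import CloudDeployment
from anyscale.compute_config.models import CloudDeployment as CloudDeploymentSelector

print(CloudDeployment is CloudDeploymentSelector)  # expected False: defined in different modules
```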
anyscale/_private/docgen/generator.py
CHANGED
@@ -46,6 +46,20 @@ CUSTOMER_HOSTED_QUALIFIER = (
 )
 
 
+def _escape_mdx_content(text: str) -> str:
+    """Escape content for MDX compatibility.
+
+    This function escapes angle brackets that could be interpreted as HTML tags
+    by MDX, converting them to escaped versions.
+    """
+    import re
+
+    # Escape angle brackets that look like HTML tags but are meant as literal text
+    # This pattern matches <word> or <word-with-hyphens> but not actual markdown/HTML
+    text = re.sub(r"<([a-zA-Z][a-zA-Z0-9\-]*?)>", r"\\<\1\\>", text)
+    return text
+
+
 @dataclass
 class Module:
     title: str
@@ -276,12 +290,13 @@ class MarkdownGenerator:
         - __doc_py_example__ (required in sdks)
         - __doc_cli_example__ (required for models and cli commands)
         """
+        skip_py_example: bool = getattr(t, "__skip_py_example__", False)
         yaml_example: Optional[str] = getattr(t, "__doc_yaml_example__", None)
         py_example: Optional[str] = getattr(t, "__doc_py_example__", None)
         cli_example: Optional[str] = getattr(t, "__doc_cli_example__", None)
 
         if isinstance(t, ModelBaseType):
-            if not py_example:
+            if not skip_py_example and not py_example:
                 raise ValueError(
                     f"Model '{t.__name__}' is missing a '__doc_py_example__'."
                 )
@@ -370,24 +385,25 @@ class MarkdownGenerator:
                         f"Model '{t.__name__}' is missing a docstring for field '{field.name}'"
                     )
 
-                md += f"- **`{field.name}` ({self._model_type_to_string(field.type)})**: {docstring}\n"
+                md += f"- **`{field.name}` ({self._model_type_to_string(field.type)})**: {_escape_mdx_content(docstring)}\n"
 
                 customer_hosted_only = field.metadata.get("customer_hosted_only", False)
                 if customer_hosted_only:
                     md += f"  - {CUSTOMER_HOSTED_QUALIFIER}\n"
             md += "\n\n"
 
-
-
-
-
-
-
-
-
-
-
-
+            if not getattr(t, "__skip_py_example__", False):
+                md += "#### Python Methods\n\n"
+                md += "```python\n"
+                if t.__name__.endswith("Config"):
+                    # Only include constructor docs for config models.
+                    md += f"def __init__(self, **fields) -> {t.__name__}\n"
+                    md += '    """Construct a model with the provided field values set."""\n\n'
+                md += f"def options(self, **fields) -> {t.__name__}\n"
+                md += '    """Return a copy of the model with the provided field values overwritten."""\n\n'
+                md += "def to_dict(self) -> Dict[str, Any]\n"
+                md += '    """Return a dictionary representation of the model."""\n'
+                md += "```\n"
 
             md += self._gen_example_tabs(t)
         elif isinstance(t, ModelEnumType):
@@ -545,7 +561,7 @@ class MarkdownGenerator:
         md += ":::warning[Limited support]\n"
         md += "This command is not actively maintained. Use with caution.\n"
         md += ":::\n"
-        md += legacy_sdk.docstring + "\n"
+        md += _escape_mdx_content(legacy_sdk.docstring) + "\n"
 
         return md
 
@@ -555,7 +571,7 @@ class MarkdownGenerator:
        The sections will be:
        - All fields and their types
        """
-        md = f"### `{legacy_model.name}` <span class='label-h3 label-legacy'>Legacy</span
-        md += legacy_model.docstring + "\n"
+        md = f"### `{legacy_model.name}` <span class='label-h3 label-legacy'>Legacy</span> {{#{legacy_model.name.lower()}-legacy}}\n"
+        md += _escape_mdx_content(legacy_model.docstring) + "\n"
 
         return md
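The new `_escape_mdx_content` helper above is self-contained, so its behavior is easy to check in isolation. A standalone reproduction of the same regex (function name here is illustrative, not the package's):

```python
import re


def escape_mdx(text: str) -> str:
    # Same substitution as _escape_mdx_content: turn bare <tag-like> tokens
    # into \<tag-like\> so MDX does not parse them as JSX/HTML elements.
    return re.sub(r"<([a-zA-Z][a-zA-Z0-9\-]*?)>", r"\\<\1\\>", text)


print(escape_mdx("Pass the <cluster-id> returned by the API."))
# Pass the \<cluster-id\> returned by the API.
```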