dstack 0.19.18__py3-none-any.whl → 0.19.20__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Potentially problematic release: this version of dstack has been flagged as possibly problematic.
- dstack/_internal/cli/services/configurators/fleet.py +99 -1
- dstack/_internal/cli/services/profile.py +1 -1
- dstack/_internal/core/backends/cloudrift/api_client.py +13 -1
- dstack/_internal/core/backends/oci/resources.py +5 -5
- dstack/_internal/core/compatibility/runs.py +12 -1
- dstack/_internal/core/compatibility/volumes.py +2 -0
- dstack/_internal/core/models/common.py +38 -2
- dstack/_internal/core/models/configurations.py +9 -1
- dstack/_internal/core/models/fleets.py +2 -1
- dstack/_internal/core/models/profiles.py +8 -5
- dstack/_internal/core/models/resources.py +15 -8
- dstack/_internal/core/models/runs.py +41 -138
- dstack/_internal/core/models/volumes.py +14 -0
- dstack/_internal/core/services/diff.py +30 -10
- dstack/_internal/core/services/ssh/attach.py +2 -0
- dstack/_internal/server/app.py +17 -9
- dstack/_internal/server/background/__init__.py +5 -3
- dstack/_internal/server/background/tasks/process_gateways.py +46 -28
- dstack/_internal/server/background/tasks/process_idle_volumes.py +139 -0
- dstack/_internal/server/background/tasks/process_submitted_jobs.py +2 -0
- dstack/_internal/server/migrations/versions/35e90e1b0d3e_add_rolling_deployment_fields.py +6 -6
- dstack/_internal/server/migrations/versions/d5863798bf41_add_volumemodel_last_job_processed_at.py +40 -0
- dstack/_internal/server/models.py +1 -0
- dstack/_internal/server/routers/backends.py +23 -16
- dstack/_internal/server/routers/files.py +7 -6
- dstack/_internal/server/routers/fleets.py +47 -36
- dstack/_internal/server/routers/gateways.py +27 -18
- dstack/_internal/server/routers/instances.py +18 -13
- dstack/_internal/server/routers/logs.py +7 -3
- dstack/_internal/server/routers/metrics.py +14 -8
- dstack/_internal/server/routers/projects.py +33 -22
- dstack/_internal/server/routers/repos.py +7 -6
- dstack/_internal/server/routers/runs.py +49 -28
- dstack/_internal/server/routers/secrets.py +20 -15
- dstack/_internal/server/routers/server.py +7 -4
- dstack/_internal/server/routers/users.py +22 -19
- dstack/_internal/server/routers/volumes.py +34 -25
- dstack/_internal/server/schemas/logs.py +3 -11
- dstack/_internal/server/schemas/runs.py +17 -5
- dstack/_internal/server/services/fleets.py +354 -72
- dstack/_internal/server/services/gateways/__init__.py +13 -4
- dstack/_internal/server/services/gateways/client.py +5 -3
- dstack/_internal/server/services/instances.py +8 -0
- dstack/_internal/server/services/jobs/__init__.py +45 -0
- dstack/_internal/server/services/jobs/configurators/base.py +7 -0
- dstack/_internal/server/services/locking.py +3 -1
- dstack/_internal/server/services/logging.py +4 -2
- dstack/_internal/server/services/logs/__init__.py +15 -2
- dstack/_internal/server/services/logs/aws.py +47 -7
- dstack/_internal/server/services/logs/filelog.py +148 -32
- dstack/_internal/server/services/logs/gcp.py +3 -5
- dstack/_internal/server/services/prometheus/custom_metrics.py +20 -0
- dstack/_internal/server/services/proxy/repo.py +4 -1
- dstack/_internal/server/services/runs.py +115 -32
- dstack/_internal/server/services/services/__init__.py +2 -1
- dstack/_internal/server/services/users.py +3 -1
- dstack/_internal/server/services/volumes.py +13 -0
- dstack/_internal/server/settings.py +7 -2
- dstack/_internal/server/statics/index.html +1 -1
- dstack/_internal/server/statics/{main-d1ac2e8c38ed5f08a114.js → main-39a767528976f8078166.js} +11 -30
- dstack/_internal/server/statics/{main-d1ac2e8c38ed5f08a114.js.map → main-39a767528976f8078166.js.map} +1 -1
- dstack/_internal/server/statics/{main-d58fc0460cb0eae7cb5c.css → main-8f9ee218d3eb45989682.css} +2 -2
- dstack/_internal/server/testing/common.py +41 -5
- dstack/_internal/server/utils/routers.py +31 -8
- dstack/_internal/utils/common.py +10 -21
- dstack/_internal/utils/json_utils.py +54 -0
- dstack/api/_public/runs.py +13 -2
- dstack/api/server/_runs.py +12 -2
- dstack/version.py +1 -1
- {dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/METADATA +7 -5
- {dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/RECORD +74 -71
- {dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/WHEEL +0 -0
- {dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/entry_points.txt +0 -0
- {dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/licenses/LICENSE.md +0 -0
dstack/_internal/server/testing/common.py CHANGED

@@ -31,6 +31,8 @@ from dstack._internal.core.models.fleets import (
     FleetSpec,
     FleetStatus,
     InstanceGroupPlacement,
+    SSHHostParams,
+    SSHParams,
 )
 from dstack._internal.core.models.gateways import GatewayComputeConfiguration, GatewayStatus
 from dstack._internal.core.models.instances import (

@@ -378,6 +380,7 @@ def get_job_provisioning_data(
     hostname: str = "127.0.0.4",
     internal_ip: Optional[str] = "127.0.0.4",
     price: float = 10.5,
+    instance_type: Optional[InstanceType] = None,
 ) -> JobProvisioningData:
     gpus = [
         Gpu(

@@ -386,14 +389,16 @@ def get_job_provisioning_data(
             vendor=gpuhunt.AcceleratorVendor.NVIDIA,
         )
     ] * gpu_count
-    return JobProvisioningData(
-        backend=backend,
-        instance_type=InstanceType(
+    if instance_type is None:
+        instance_type = InstanceType(
             name="instance",
             resources=Resources(
                 cpus=cpu_count, memory_mib=int(memory_gib * 1024), spot=spot, gpus=gpus
             ),
-        )
+        )
+    return JobProvisioningData(
+        backend=backend,
+        instance_type=instance_type,
         instance_id="instance_id",
         hostname=hostname,
         internal_ip=internal_ip,

@@ -549,6 +554,31 @@ def get_fleet_configuration(
     )


+def get_ssh_fleet_configuration(
+    name: str = "test-fleet",
+    user: str = "ubuntu",
+    ssh_key: Optional[SSHKey] = None,
+    hosts: Optional[list[Union[SSHHostParams, str]]] = None,
+    network: Optional[str] = None,
+    placement: Optional[InstanceGroupPlacement] = None,
+) -> FleetConfiguration:
+    if ssh_key is None:
+        ssh_key = SSHKey(public="", private=get_private_key_string())
+    if hosts is None:
+        hosts = ["10.0.0.100"]
+    ssh_config = SSHParams(
+        user=user,
+        ssh_key=ssh_key,
+        hosts=hosts,
+        network=network,
+    )
+    return FleetConfiguration(
+        name=name,
+        ssh_config=ssh_config,
+        placement=placement,
+    )
+
+
 async def create_instance(
     session: AsyncSession,
     project: ProjectModel,

@@ -590,7 +620,9 @@ async def create_instance(
         internal_ip=None,
     )
     if offer == "auto":
-        offer = get_instance_offer_with_availability(
+        offer = get_instance_offer_with_availability(
+            backend=backend, region=region, spot=spot, price=price
+        )
     if profile is None:
         profile = Profile(name="test_name")

@@ -742,6 +774,7 @@ async def create_volume(
     status: VolumeStatus = VolumeStatus.SUBMITTED,
     created_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
     last_processed_at: Optional[datetime] = None,
+    last_job_processed_at: Optional[datetime] = None,
     configuration: Optional[VolumeConfiguration] = None,
     volume_provisioning_data: Optional[VolumeProvisioningData] = None,
     deleted_at: Optional[datetime] = None,

@@ -759,6 +792,7 @@ async def create_volume(
         status=status,
         created_at=created_at,
         last_processed_at=last_processed_at,
+        last_job_processed_at=last_job_processed_at,
         configuration=configuration.json(),
         volume_provisioning_data=volume_provisioning_data.json()
         if volume_provisioning_data

@@ -820,6 +854,7 @@ def get_volume_configuration(
     region: str = "eu-west-1",
     size: Optional[Memory] = Memory(100),
     volume_id: Optional[str] = None,
+    auto_cleanup_duration: Optional[Union[str, int]] = None,
 ) -> VolumeConfiguration:
     return VolumeConfiguration(
         name=name,

@@ -827,6 +862,7 @@ def get_volume_configuration(
         region=region,
         size=size,
         volume_id=volume_id,
+        auto_cleanup_duration=auto_cleanup_duration,
     )
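These helpers feed the server test suite. A minimal sketch of how the new ones might be combined in a test; the `session` and `project` fixtures and the assertions follow the existing suite's conventions and are assumptions, not part of this diff:

```python
# Hedged sketch only: combining the testing helpers added in this release.
# `session` and `project` are assumed pytest fixtures, as used elsewhere in the suite.
from dstack._internal.server.testing.common import (
    create_volume,
    get_ssh_fleet_configuration,
    get_volume_configuration,
)


async def test_ssh_fleet_and_idle_volume(session, project):
    # SSH fleet spec pointing at a single on-prem host
    fleet_conf = get_ssh_fleet_configuration(user="ubuntu", hosts=["10.0.0.100"])
    assert fleet_conf.ssh_config.hosts == ["10.0.0.100"]

    # Volume configured to be cleaned up after one idle hour
    volume = await create_volume(
        session=session,
        project=project,
        configuration=get_volume_configuration(auto_cleanup_duration="1h"),
    )
    assert volume.last_job_processed_at is None
```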
dstack/_internal/server/utils/routers.py CHANGED

@@ -1,11 +1,34 @@
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional

-
-from fastapi
+import orjson
+from fastapi import HTTPException, Request, Response, status
 from packaging import version

 from dstack._internal.core.errors import ServerClientError, ServerClientErrorCode
 from dstack._internal.core.models.common import CoreModel
+from dstack._internal.utils.json_utils import get_orjson_default_options, orjson_default
+
+
+class CustomORJSONResponse(Response):
+    """
+    Custom JSONResponse that uses orjson for serialization.
+
+    It's recommended to return this class from routers directly instead of
+    returning pydantic models to avoid the FastAPI's jsonable_encoder overhead.
+    See https://fastapi.tiangolo.com/advanced/custom-response/#use-orjsonresponse.
+
+    Beware that FastAPI skips model validation when responses are returned directly.
+    If serialization needs to be modified, override `dict()` instead of adding validators.
+    """
+
+    media_type = "application/json"
+
+    def render(self, content: Any) -> bytes:
+        return orjson.dumps(
+            content,
+            option=get_orjson_default_options(),
+            default=orjson_default,
+        )


 class BadRequestDetailsModel(CoreModel):

@@ -30,7 +53,7 @@ def get_base_api_additional_responses() -> Dict:
     """
     Returns additional responses for the OpenAPI docs relevant to all API endpoints.
     The endpoints may override responses to make them as specific as possible.
-    E.g. an
+    E.g. an endpoint may specify which error codes it may return in `code`.
     """
     return {
         400: get_bad_request_additional_response(),

@@ -102,7 +125,7 @@ def get_request_size(request: Request) -> int:
 def check_client_server_compatibility(
     client_version: Optional[str],
     server_version: Optional[str],
-) -> Optional[
+) -> Optional[CustomORJSONResponse]:
     """
     Returns `JSONResponse` with error if client/server versions are incompatible.
     Returns `None` otherwise.

@@ -116,7 +139,7 @@ def check_client_server_compatibility(
     try:
         parsed_client_version = version.parse(client_version)
     except version.InvalidVersion:
-        return
+        return CustomORJSONResponse(
             status_code=status.HTTP_400_BAD_REQUEST,
             content={
                 "detail": get_server_client_error_details(

@@ -138,11 +161,11 @@ def error_incompatible_versions(
     client_version: Optional[str],
     server_version: str,
     ask_cli_update: bool,
-) ->
+) -> CustomORJSONResponse:
     msg = f"The client/CLI version ({client_version}) is incompatible with the server version ({server_version})."
     if ask_cli_update:
         msg += f" Update the dstack CLI: `pip install dstack=={server_version}`."
-    return
+    return CustomORJSONResponse(
         status_code=status.HTTP_400_BAD_REQUEST,
         content={"detail": get_server_client_error_details(ServerClientError(msg=msg))},
     )
dstack/_internal/utils/common.py CHANGED

@@ -225,27 +225,6 @@ def remove_prefix(text: str, prefix: str) -> str:
 T = TypeVar("T")


-def split_chunks(iterable: Iterable[T], chunk_size: int) -> Iterable[List[T]]:
-    """
-    Splits an iterable into chunks of at most `chunk_size` items.
-
-    >>> list(split_chunks([1, 2, 3, 4, 5], 2))
-    [[1, 2], [3, 4], [5]]
-    """
-
-    if chunk_size < 1:
-        raise ValueError(f"chunk_size should be a positive integer, not {chunk_size}")
-
-    chunk = []
-    for item in iterable:
-        chunk.append(item)
-        if len(chunk) == chunk_size:
-            yield chunk
-            chunk = []
-    if chunk:
-        yield chunk
-
-
 MEMORY_UNITS = {
     "B": 1,
     "K": 2**10,

@@ -283,7 +262,17 @@ def get_or_error(v: Optional[T]) -> T:
     return v


+# TODO: drop after dropping Python 3.11
 def batched(seq: Iterable[T], n: int) -> Iterable[List[T]]:
+    """
+    Roughly equivalent to itertools.batched from Python 3.12.
+
+    >>> list(batched([1, 2, 3, 4, 5], 2))
+    [[1, 2], [3, 4], [5]]
+    """
+
+    if n < 1:
+        raise ValueError(f"n should be a positive integer, not {n}")
     it = iter(seq)
     return iter(lambda: list(itertools.islice(it, n)), [])
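In short, `split_chunks` is dropped in favor of the existing `batched`, a thin stand-in for `itertools.batched` from Python 3.12 that yields lists. Usage is unchanged apart from the name:

```python
from dstack._internal.utils.common import batched

# Groups of at most 3 items; the final batch may be shorter.
for batch in batched(range(10), 3):
    print(batch)  # [0, 1, 2], then [3, 4, 5], [6, 7, 8], [9]
```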
dstack/_internal/utils/json_utils.py ADDED

@@ -0,0 +1,54 @@
+from typing import Any
+
+import orjson
+from pydantic import BaseModel
+
+FREEZEGUN = True
+try:
+    from freezegun.api import FakeDatetime
+except ImportError:
+    FREEZEGUN = False
+
+
+ASYNCPG = True
+try:
+    import asyncpg.pgproto.pgproto
+except ImportError:
+    ASYNCPG = False
+
+
+def pydantic_orjson_dumps(v: Any, *, default: Any) -> str:
+    return orjson.dumps(
+        v,
+        option=get_orjson_default_options(),
+        default=orjson_default,
+    ).decode()
+
+
+def pydantic_orjson_dumps_with_indent(v: Any, *, default: Any) -> str:
+    return orjson.dumps(
+        v,
+        option=get_orjson_default_options() | orjson.OPT_INDENT_2,
+        default=orjson_default,
+    ).decode()
+
+
+def orjson_default(obj):
+    if isinstance(obj, float):
+        # orjson does not convert float subclasses by default
+        return float(obj)
+    if isinstance(obj, BaseModel):
+        # Allows calling orjson.dumps() on pydantic models
+        # (e.g. to return from the API)
+        return obj.dict()
+    if ASYNCPG:
+        if isinstance(obj, asyncpg.pgproto.pgproto.UUID):
+            return str(obj)
+    if FREEZEGUN:
+        if isinstance(obj, FakeDatetime):
+            return obj.isoformat()
+    raise TypeError
+
+
+def get_orjson_default_options() -> int:
+    return orjson.OPT_NON_STR_KEYS
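This new module backs both the CustomORJSONResponse above and pydantic serialization. A hedged sketch of wiring `pydantic_orjson_dumps` into a pydantic v1 model config; the model below is illustrative, not taken from dstack:

```python
# Illustrative sketch: routing a pydantic v1 model's .json() through orjson
# using the helpers added in this release.
from pydantic import BaseModel

from dstack._internal.utils.json_utils import pydantic_orjson_dumps


class ExampleModel(BaseModel):
    name: str
    price: float

    class Config:
        # pydantic v1 calls json_dumps(obj, default=...) when .json() is invoked
        json_dumps = pydantic_orjson_dumps


print(ExampleModel(name="gpu", price=1.5).json())  # {"name":"gpu","price":1.5}
```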
dstack/api/_public/runs.py CHANGED

@@ -18,7 +18,11 @@ import dstack.api as api
 from dstack._internal.core.consts import DSTACK_RUNNER_HTTP_PORT, DSTACK_RUNNER_SSH_PORT
 from dstack._internal.core.errors import ClientError, ConfigurationError, ResourceNotExistsError
 from dstack._internal.core.models.backends.base import BackendType
-from dstack._internal.core.models.configurations import
+from dstack._internal.core.models.configurations import (
+    AnyRunConfiguration,
+    PortMapping,
+    ServiceConfiguration,
+)
 from dstack._internal.core.models.files import FileArchiveMapping, FilePathMapping
 from dstack._internal.core.models.profiles import (
     CreationPolicy,

@@ -38,6 +42,7 @@ from dstack._internal.core.models.runs import (
     RunPlan,
     RunSpec,
     RunStatus,
+    get_service_port,
 )
 from dstack._internal.core.models.runs import Run as RunModel
 from dstack._internal.core.services.logs import URLReplacer

@@ -163,7 +168,7 @@ class Run(ABC):
         service_port = 443 if secure else 80
         ports = {
             **ports,
-            self.
+            get_or_error(get_or_error(self._ssh_attach).service_port): service_port,
         }
         path_prefix = url.path
         replace_urls = URLReplacer(

@@ -338,6 +343,10 @@ class Run(ABC):
         else:
             container_user = "root"

+        service_port = None
+        if isinstance(self._run.run_spec.configuration, ServiceConfiguration):
+            service_port = get_service_port(job.job_spec, self._run.run_spec.configuration)
+
         self._ssh_attach = SSHAttach(
             hostname=provisioning_data.hostname,
             ssh_port=provisioning_data.ssh_port,

@@ -349,6 +358,7 @@ class Run(ABC):
             run_name=name,
             dockerized=provisioning_data.dockerized,
             ssh_proxy=provisioning_data.ssh_proxy,
+            service_port=service_port,
             local_backend=provisioning_data.backend == BackendType.LOCAL,
             bind_address=bind_address,
         )

@@ -748,6 +758,7 @@ class RunCollection:
             repo_id=None,
             only_active=only_active,
             limit=limit or 100,
+            # TODO: Pass job_submissions_limit=1 in 0.20
         )
         if only_active and len(runs) == 0:
             runs = self._api_client.runs.list(
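For context, the service port now travels with the SSH attach, so attaching to a service run forwards it automatically. A hedged sketch using the public API; the client setup and run name are illustrative:

```python
# Hedged sketch: attaching to a service run; with this change the SSH tunnel also
# forwards the in-container port resolved via get_service_port().
from dstack.api import Client

client = Client.from_config()
run = client.runs.get("my-service-run")
try:
    run.attach()
finally:
    run.detach()
```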
dstack/api/server/_runs.py CHANGED

@@ -4,7 +4,11 @@ from uuid import UUID

 from pydantic import parse_obj_as

-from dstack._internal.core.compatibility.runs import
+from dstack._internal.core.compatibility.runs import (
+    get_apply_plan_excludes,
+    get_get_plan_excludes,
+    get_list_runs_excludes,
+)
 from dstack._internal.core.models.runs import (
     ApplyRunPlanInput,
     Run,

@@ -33,18 +37,24 @@ class RunsAPIClient(APIClientGroup):
         prev_run_id: Optional[UUID] = None,
         limit: int = 100,
         ascending: bool = False,
+        include_jobs: bool = True,
+        job_submissions_limit: Optional[int] = None,
     ) -> List[Run]:
         body = ListRunsRequest(
             project_name=project_name,
             repo_id=repo_id,
             username=username,
             only_active=only_active,
+            include_jobs=include_jobs,
+            job_submissions_limit=job_submissions_limit,
             prev_submitted_at=prev_submitted_at,
             prev_run_id=prev_run_id,
             limit=limit,
             ascending=ascending,
         )
-        resp = self._request(
+        resp = self._request(
+            "/api/runs/list", body=body.json(exclude=get_list_runs_excludes(body))
+        )
         return parse_obj_as(List[Run.__response__], resp.json())

     def get(self, project_name: str, run_name: str) -> Run:
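The new `include_jobs` and `job_submissions_limit` parameters let callers trim the `/api/runs/list` payload. A hedged sketch of a call through the low-level server client; the `APIClient` constructor arguments are assumptions, not shown in this diff:

```python
# Hedged sketch: listing active runs while keeping only the latest job submission
# per job. APIClient construction follows the usual base_url/token pattern (assumed).
from dstack.api.server import APIClient

api_client = APIClient(base_url="http://localhost:3000", token="<admin token>")
runs = api_client.runs.list(
    project_name="main",
    repo_id=None,
    username=None,
    only_active=True,
    prev_submitted_at=None,
    prev_run_id=None,
    include_jobs=True,
    job_submissions_limit=1,
)
```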
{dstack-0.19.18.dist-info → dstack-0.19.20.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dstack
-Version: 0.19.18
+Version: 0.19.20
 Summary: dstack is an open-source orchestration engine for running AI workloads on any cloud or on-premises.
 Project-URL: Homepage, https://dstack.ai
 Project-URL: Source, https://github.com/dstackai/dstack

@@ -24,6 +24,7 @@ Requires-Dist: gitpython
 Requires-Dist: gpuhunt==0.1.6
 Requires-Dist: ignore-python>=0.2.0
 Requires-Dist: jsonschema
+Requires-Dist: orjson
 Requires-Dist: packaging
 Requires-Dist: paramiko>=3.2.0
 Requires-Dist: psutil

@@ -346,12 +347,13 @@ Description-Content-Type: text/markdown
 `dstack` supports `NVIDIA`, `AMD`, `Google TPU`, `Intel Gaudi`, and `Tenstorrent` accelerators out of the box.

 ## Latest news ✨
-
+- [2025/07] [dstack 0.19.17: Secrets, Files, Rolling deployment](https://github.com/dstackai/dstack/releases/tag/0.19.17)
+- [2025/06] [dstack 0.19.16: Docker in Docker, CloudRift](https://github.com/dstackai/dstack/releases/tag/0.19.16)
+- [2025/06] [dstack 0.19.13: InfiniBand support in default images](https://github.com/dstackai/dstack/releases/tag/0.19.13)
+- [2025/06] [dstack 0.19.12: Simplified use of MPI](https://github.com/dstackai/dstack/releases/tag/0.19.12)
+- [2025/05] [dstack 0.19.10: Priorities](https://github.com/dstackai/dstack/releases/tag/0.19.10)
 - [2025/05] [dstack 0.19.8: Nebius clusters, GH200 on Lambda](https://github.com/dstackai/dstack/releases/tag/0.19.8)
 - [2025/04] [dstack 0.19.6: Tenstorrent, Plugins](https://github.com/dstackai/dstack/releases/tag/0.19.6)
-- [2025/04] [dstack 0.19.5: GCP A3 High clusters](https://github.com/dstackai/dstack/releases/tag/0.19.5)
-- [2025/04] [dstack 0.19.3: GCP A3 Mega clusters](https://github.com/dstackai/dstack/releases/tag/0.19.3)
-- [2025/03] [dstack 0.19.0: Prometheus](https://github.com/dstackai/dstack/releases/tag/0.19.0)

 ## How does it work?