dstack 0.19.17-py3-none-any.whl → 0.19.19-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of dstack might be problematic.

Files changed (86)
  1. dstack/_internal/cli/services/configurators/fleet.py +111 -1
  2. dstack/_internal/cli/services/profile.py +1 -1
  3. dstack/_internal/core/backends/aws/compute.py +237 -18
  4. dstack/_internal/core/backends/base/compute.py +20 -2
  5. dstack/_internal/core/backends/cudo/compute.py +23 -9
  6. dstack/_internal/core/backends/gcp/compute.py +13 -7
  7. dstack/_internal/core/backends/lambdalabs/compute.py +2 -1
  8. dstack/_internal/core/compatibility/fleets.py +12 -11
  9. dstack/_internal/core/compatibility/gateways.py +9 -8
  10. dstack/_internal/core/compatibility/logs.py +4 -3
  11. dstack/_internal/core/compatibility/runs.py +29 -21
  12. dstack/_internal/core/compatibility/volumes.py +11 -8
  13. dstack/_internal/core/errors.py +4 -0
  14. dstack/_internal/core/models/common.py +45 -2
  15. dstack/_internal/core/models/configurations.py +9 -1
  16. dstack/_internal/core/models/fleets.py +2 -1
  17. dstack/_internal/core/models/profiles.py +8 -5
  18. dstack/_internal/core/models/resources.py +15 -8
  19. dstack/_internal/core/models/runs.py +41 -138
  20. dstack/_internal/core/models/volumes.py +14 -0
  21. dstack/_internal/core/services/diff.py +56 -3
  22. dstack/_internal/core/services/ssh/attach.py +2 -0
  23. dstack/_internal/server/app.py +37 -9
  24. dstack/_internal/server/background/__init__.py +66 -40
  25. dstack/_internal/server/background/tasks/process_fleets.py +19 -3
  26. dstack/_internal/server/background/tasks/process_gateways.py +47 -29
  27. dstack/_internal/server/background/tasks/process_idle_volumes.py +139 -0
  28. dstack/_internal/server/background/tasks/process_instances.py +13 -2
  29. dstack/_internal/server/background/tasks/process_placement_groups.py +4 -2
  30. dstack/_internal/server/background/tasks/process_running_jobs.py +14 -3
  31. dstack/_internal/server/background/tasks/process_runs.py +8 -4
  32. dstack/_internal/server/background/tasks/process_submitted_jobs.py +38 -7
  33. dstack/_internal/server/background/tasks/process_terminating_jobs.py +5 -3
  34. dstack/_internal/server/background/tasks/process_volumes.py +2 -2
  35. dstack/_internal/server/migrations/versions/35e90e1b0d3e_add_rolling_deployment_fields.py +6 -6
  36. dstack/_internal/server/migrations/versions/d5863798bf41_add_volumemodel_last_job_processed_at.py +40 -0
  37. dstack/_internal/server/models.py +1 -0
  38. dstack/_internal/server/routers/backends.py +23 -16
  39. dstack/_internal/server/routers/files.py +7 -6
  40. dstack/_internal/server/routers/fleets.py +47 -36
  41. dstack/_internal/server/routers/gateways.py +27 -18
  42. dstack/_internal/server/routers/instances.py +18 -13
  43. dstack/_internal/server/routers/logs.py +7 -3
  44. dstack/_internal/server/routers/metrics.py +14 -8
  45. dstack/_internal/server/routers/projects.py +33 -22
  46. dstack/_internal/server/routers/repos.py +7 -6
  47. dstack/_internal/server/routers/runs.py +49 -28
  48. dstack/_internal/server/routers/secrets.py +20 -15
  49. dstack/_internal/server/routers/server.py +7 -4
  50. dstack/_internal/server/routers/users.py +22 -19
  51. dstack/_internal/server/routers/volumes.py +34 -25
  52. dstack/_internal/server/schemas/logs.py +2 -2
  53. dstack/_internal/server/schemas/runs.py +17 -5
  54. dstack/_internal/server/services/fleets.py +358 -75
  55. dstack/_internal/server/services/gateways/__init__.py +17 -6
  56. dstack/_internal/server/services/gateways/client.py +5 -3
  57. dstack/_internal/server/services/instances.py +8 -0
  58. dstack/_internal/server/services/jobs/__init__.py +45 -0
  59. dstack/_internal/server/services/jobs/configurators/base.py +12 -1
  60. dstack/_internal/server/services/locking.py +104 -13
  61. dstack/_internal/server/services/logging.py +4 -2
  62. dstack/_internal/server/services/logs/__init__.py +15 -2
  63. dstack/_internal/server/services/logs/aws.py +2 -4
  64. dstack/_internal/server/services/logs/filelog.py +33 -27
  65. dstack/_internal/server/services/logs/gcp.py +3 -5
  66. dstack/_internal/server/services/proxy/repo.py +4 -1
  67. dstack/_internal/server/services/runs.py +139 -72
  68. dstack/_internal/server/services/services/__init__.py +2 -1
  69. dstack/_internal/server/services/users.py +3 -1
  70. dstack/_internal/server/services/volumes.py +15 -2
  71. dstack/_internal/server/settings.py +25 -6
  72. dstack/_internal/server/statics/index.html +1 -1
  73. dstack/_internal/server/statics/{main-d151637af20f70b2e796.js → main-64f8273740c4b52c18f5.js} +71 -67
  74. dstack/_internal/server/statics/{main-d151637af20f70b2e796.js.map → main-64f8273740c4b52c18f5.js.map} +1 -1
  75. dstack/_internal/server/statics/{main-d48635d8fe670d53961c.css → main-d58fc0460cb0eae7cb5c.css} +1 -1
  76. dstack/_internal/server/testing/common.py +48 -8
  77. dstack/_internal/server/utils/routers.py +31 -8
  78. dstack/_internal/utils/json_utils.py +54 -0
  79. dstack/api/_public/runs.py +13 -2
  80. dstack/api/server/_runs.py +12 -2
  81. dstack/version.py +1 -1
  82. {dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/METADATA +17 -14
  83. {dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/RECORD +86 -83
  84. {dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/WHEEL +0 -0
  85. {dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/entry_points.txt +0 -0
  86. {dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/licenses/LICENSE.md +0 -0
dstack/_internal/server/testing/common.py CHANGED

@@ -31,6 +31,8 @@ from dstack._internal.core.models.fleets import (
     FleetSpec,
     FleetStatus,
     InstanceGroupPlacement,
+    SSHHostParams,
+    SSHParams,
 )
 from dstack._internal.core.models.gateways import GatewayComputeConfiguration, GatewayStatus
 from dstack._internal.core.models.instances import (
@@ -378,6 +380,7 @@ def get_job_provisioning_data(
     hostname: str = "127.0.0.4",
     internal_ip: Optional[str] = "127.0.0.4",
     price: float = 10.5,
+    instance_type: Optional[InstanceType] = None,
 ) -> JobProvisioningData:
     gpus = [
         Gpu(
@@ -386,14 +389,16 @@ def get_job_provisioning_data(
             vendor=gpuhunt.AcceleratorVendor.NVIDIA,
         )
     ] * gpu_count
-    return JobProvisioningData(
-        backend=backend,
-        instance_type=InstanceType(
+    if instance_type is None:
+        instance_type = InstanceType(
             name="instance",
             resources=Resources(
                 cpus=cpu_count, memory_mib=int(memory_gib * 1024), spot=spot, gpus=gpus
             ),
-        ),
+        )
+    return JobProvisioningData(
+        backend=backend,
+        instance_type=instance_type,
         instance_id="instance_id",
         hostname=hostname,
         internal_ip=internal_ip,
@@ -502,6 +507,7 @@ async def create_fleet(
     status: FleetStatus = FleetStatus.ACTIVE,
     deleted: bool = False,
     name: Optional[str] = None,
+    last_processed_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
 ) -> FleetModel:
     if fleet_id is None:
         fleet_id = uuid.uuid4()
@@ -519,6 +525,7 @@ async def create_fleet(
         spec=spec.json(),
         instances=[],
         runs=[],
+        last_processed_at=last_processed_at,
     )
     session.add(fm)
     await session.commit()
@@ -547,6 +554,31 @@ def get_fleet_configuration(
     )


+def get_ssh_fleet_configuration(
+    name: str = "test-fleet",
+    user: str = "ubuntu",
+    ssh_key: Optional[SSHKey] = None,
+    hosts: Optional[list[Union[SSHHostParams, str]]] = None,
+    network: Optional[str] = None,
+    placement: Optional[InstanceGroupPlacement] = None,
+) -> FleetConfiguration:
+    if ssh_key is None:
+        ssh_key = SSHKey(public="", private=get_private_key_string())
+    if hosts is None:
+        hosts = ["10.0.0.100"]
+    ssh_config = SSHParams(
+        user=user,
+        ssh_key=ssh_key,
+        hosts=hosts,
+        network=network,
+    )
+    return FleetConfiguration(
+        name=name,
+        ssh_config=ssh_config,
+        placement=placement,
+    )
+
+
 async def create_instance(
     session: AsyncSession,
     project: ProjectModel,
@@ -562,10 +594,10 @@ async def create_instance(
     instance_id: Optional[UUID] = None,
     job: Optional[JobModel] = None,
     instance_num: int = 0,
-    backend: Optional[BackendType] = BackendType.DATACRUNCH,
+    backend: BackendType = BackendType.DATACRUNCH,
     termination_policy: Optional[TerminationPolicy] = None,
     termination_idle_time: int = DEFAULT_FLEET_TERMINATION_IDLE_TIME,
-    region: Optional[str] = "eu-west",
+    region: str = "eu-west",
     remote_connection_info: Optional[RemoteConnectionInfo] = None,
     offer: Optional[Union[InstanceOfferWithAvailability, Literal["auto"]]] = "auto",
     job_provisioning_data: Optional[Union[JobProvisioningData, Literal["auto"]]] = "auto",
@@ -574,6 +606,7 @@ async def create_instance(
     name: str = "test_instance",
     volumes: Optional[List[VolumeModel]] = None,
     price: float = 1.0,
+    last_processed_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
 ) -> InstanceModel:
     if instance_id is None:
         instance_id = uuid.uuid4()
@@ -587,7 +620,9 @@ async def create_instance(
         internal_ip=None,
     )
     if offer == "auto":
-        offer = get_instance_offer_with_availability(backend=backend, region=region, spot=spot)
+        offer = get_instance_offer_with_availability(
+            backend=backend, region=region, spot=spot, price=price
+        )
     if profile is None:
         profile = Profile(name="test_name")

@@ -610,6 +645,7 @@ async def create_instance(
         fleet=fleet,
         project=project,
         status=status,
+        last_processed_at=last_processed_at,
         unreachable=unreachable,
         created_at=created_at,
         started_at=created_at,
@@ -738,6 +774,7 @@ async def create_volume(
     status: VolumeStatus = VolumeStatus.SUBMITTED,
     created_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
     last_processed_at: Optional[datetime] = None,
+    last_job_processed_at: Optional[datetime] = None,
     configuration: Optional[VolumeConfiguration] = None,
     volume_provisioning_data: Optional[VolumeProvisioningData] = None,
     deleted_at: Optional[datetime] = None,
@@ -755,6 +792,7 @@ async def create_volume(
         status=status,
         created_at=created_at,
         last_processed_at=last_processed_at,
+        last_job_processed_at=last_job_processed_at,
         configuration=configuration.json(),
         volume_provisioning_data=volume_provisioning_data.json()
         if volume_provisioning_data
@@ -816,6 +854,7 @@ def get_volume_configuration(
     region: str = "eu-west-1",
     size: Optional[Memory] = Memory(100),
     volume_id: Optional[str] = None,
+    auto_cleanup_duration: Optional[Union[str, int]] = None,
 ) -> VolumeConfiguration:
     return VolumeConfiguration(
         name=name,
@@ -823,6 +862,7 @@ def get_volume_configuration(
         region=region,
         size=size,
         volume_id=volume_id,
+        auto_cleanup_duration=auto_cleanup_duration,
     )


@@ -1040,7 +1080,7 @@ class ComputeMockSpec(
     ComputeWithVolumeSupport,
 ):
     """
-    Can be used to create Compute mocks that pass all isinstance asserts.
+    Can be used to create Compute mocks that pass all `isinstance()` asserts.
     """

     pass
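
The new helpers above are only shown as additions; the following is a minimal sketch (not part of the release) of how `get_ssh_fleet_configuration` and the new `auto_cleanup_duration` argument might be exercised in a test. The test name and argument values ("on-prem-fleet", "72h") are illustrative assumptions, and importing this module requires the server test dependencies.

from dstack._internal.server.testing.common import (
    get_ssh_fleet_configuration,
    get_volume_configuration,
)


def test_new_testing_helpers():
    # get_ssh_fleet_configuration() defaults to user "ubuntu", a generated SSH key,
    # and a single host, 10.0.0.100 (see the helper added in the hunk above).
    conf = get_ssh_fleet_configuration(name="on-prem-fleet")
    assert conf.ssh_config is not None
    assert len(conf.ssh_config.hosts) == 1

    # get_volume_configuration() now forwards auto_cleanup_duration to
    # VolumeConfiguration; "72h" is an assumed example of a duration value.
    volume_conf = get_volume_configuration(name="cache-volume", auto_cleanup_duration="72h")
    assert volume_conf.auto_cleanup_duration is not None
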
dstack/_internal/server/utils/routers.py CHANGED

@@ -1,11 +1,34 @@
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional

-from fastapi import HTTPException, Request, status
-from fastapi.responses import JSONResponse
+import orjson
+from fastapi import HTTPException, Request, Response, status
 from packaging import version

 from dstack._internal.core.errors import ServerClientError, ServerClientErrorCode
 from dstack._internal.core.models.common import CoreModel
+from dstack._internal.utils.json_utils import get_orjson_default_options, orjson_default
+
+
+class CustomORJSONResponse(Response):
+    """
+    Custom JSONResponse that uses orjson for serialization.
+
+    It's recommended to return this class from routers directly instead of
+    returning pydantic models to avoid the FastAPI's jsonable_encoder overhead.
+    See https://fastapi.tiangolo.com/advanced/custom-response/#use-orjsonresponse.
+
+    Beware that FastAPI skips model validation when responses are returned directly.
+    If serialization needs to be modified, override `dict()` instead of adding validators.
+    """
+
+    media_type = "application/json"
+
+    def render(self, content: Any) -> bytes:
+        return orjson.dumps(
+            content,
+            option=get_orjson_default_options(),
+            default=orjson_default,
+        )


 class BadRequestDetailsModel(CoreModel):
@@ -30,7 +53,7 @@ def get_base_api_additional_responses() -> Dict:
     """
     Returns additional responses for the OpenAPI docs relevant to all API endpoints.
     The endpoints may override responses to make them as specific as possible.
-    E.g. an enpoint may specify which error codes it may return in `code`.
+    E.g. an endpoint may specify which error codes it may return in `code`.
     """
     return {
         400: get_bad_request_additional_response(),
@@ -102,7 +125,7 @@ def get_request_size(request: Request) -> int:
 def check_client_server_compatibility(
     client_version: Optional[str],
     server_version: Optional[str],
-) -> Optional[JSONResponse]:
+) -> Optional[CustomORJSONResponse]:
     """
     Returns `JSONResponse` with error if client/server versions are incompatible.
     Returns `None` otherwise.
@@ -116,7 +139,7 @@ def check_client_server_compatibility(
     try:
         parsed_client_version = version.parse(client_version)
     except version.InvalidVersion:
-        return JSONResponse(
+        return CustomORJSONResponse(
             status_code=status.HTTP_400_BAD_REQUEST,
             content={
                 "detail": get_server_client_error_details(
@@ -138,11 +161,11 @@ def error_incompatible_versions(
     client_version: Optional[str],
     server_version: str,
     ask_cli_update: bool,
-) -> JSONResponse:
+) -> CustomORJSONResponse:
     msg = f"The client/CLI version ({client_version}) is incompatible with the server version ({server_version})."
     if ask_cli_update:
         msg += f" Update the dstack CLI: `pip install dstack=={server_version}`."
-    return JSONResponse(
+    return CustomORJSONResponse(
         status_code=status.HTTP_400_BAD_REQUEST,
         content={"detail": get_server_client_error_details(ServerClientError(msg=msg))},
     )
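
The docstring added above recommends returning `CustomORJSONResponse` directly from routers. A minimal sketch of that pattern; the endpoint path and the `VolumeOut` model are illustrative and not taken from this diff.

from fastapi import FastAPI
from pydantic import BaseModel

from dstack._internal.server.utils.routers import CustomORJSONResponse


class VolumeOut(BaseModel):
    # Illustrative response model, not part of dstack.
    name: str
    size_gb: int


app = FastAPI()


@app.get("/api/volumes/example")
def get_example_volume() -> CustomORJSONResponse:
    # Returning the response object directly skips FastAPI's jsonable_encoder;
    # orjson_default (added in json_utils.py, shown in the next file) serializes
    # the pydantic model via its dict() method. Response model validation is skipped.
    return CustomORJSONResponse(VolumeOut(name="cache", size_gb=100))
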
dstack/_internal/utils/json_utils.py ADDED

@@ -0,0 +1,54 @@
+from typing import Any
+
+import orjson
+from pydantic import BaseModel
+
+FREEZEGUN = True
+try:
+    from freezegun.api import FakeDatetime
+except ImportError:
+    FREEZEGUN = False
+
+
+ASYNCPG = True
+try:
+    import asyncpg.pgproto.pgproto
+except ImportError:
+    ASYNCPG = False
+
+
+def pydantic_orjson_dumps(v: Any, *, default: Any) -> str:
+    return orjson.dumps(
+        v,
+        option=get_orjson_default_options(),
+        default=orjson_default,
+    ).decode()
+
+
+def pydantic_orjson_dumps_with_indent(v: Any, *, default: Any) -> str:
+    return orjson.dumps(
+        v,
+        option=get_orjson_default_options() | orjson.OPT_INDENT_2,
+        default=orjson_default,
+    ).decode()
+
+
+def orjson_default(obj):
+    if isinstance(obj, float):
+        # orjson does not convert float subclasses be default
+        return float(obj)
+    if isinstance(obj, BaseModel):
+        # Allows calling orjson.dumps() on pydantic models
+        # (e.g. to return from the API)
+        return obj.dict()
+    if ASYNCPG:
+        if isinstance(obj, asyncpg.pgproto.pgproto.UUID):
+            return str(obj)
+    if FREEZEGUN:
+        if isinstance(obj, FakeDatetime):
+            return obj.isoformat()
+    raise TypeError
+
+
+def get_orjson_default_options() -> int:
+    return orjson.OPT_NON_STR_KEYS
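
The `pydantic_orjson_dumps*` helpers accept the `(value, *, default)` signature that pydantic v1 passes to `Config.json_dumps`, so they can be plugged into a model to make `.json()` serialize via orjson. A sketch under that assumption; `ExampleSpec` is illustrative and not part of dstack.

from typing import Optional

from pydantic import BaseModel

from dstack._internal.utils.json_utils import pydantic_orjson_dumps_with_indent


class ExampleSpec(BaseModel):
    # Illustrative model, not taken from dstack.
    name: str
    replicas: int = 1
    image: Optional[str] = None

    class Config:
        # pydantic v1 calls json_dumps(value, default=...) from .json();
        # the helper ignores pydantic's encoder and uses orjson_default instead.
        json_dumps = pydantic_orjson_dumps_with_indent


print(ExampleSpec(name="web", image="python:3.11").json())
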
dstack/api/_public/runs.py CHANGED

@@ -18,7 +18,11 @@ import dstack.api as api
 from dstack._internal.core.consts import DSTACK_RUNNER_HTTP_PORT, DSTACK_RUNNER_SSH_PORT
 from dstack._internal.core.errors import ClientError, ConfigurationError, ResourceNotExistsError
 from dstack._internal.core.models.backends.base import BackendType
-from dstack._internal.core.models.configurations import AnyRunConfiguration, PortMapping
+from dstack._internal.core.models.configurations import (
+    AnyRunConfiguration,
+    PortMapping,
+    ServiceConfiguration,
+)
 from dstack._internal.core.models.files import FileArchiveMapping, FilePathMapping
 from dstack._internal.core.models.profiles import (
     CreationPolicy,
@@ -38,6 +42,7 @@ from dstack._internal.core.models.runs import (
     RunPlan,
     RunSpec,
     RunStatus,
+    get_service_port,
 )
 from dstack._internal.core.models.runs import Run as RunModel
 from dstack._internal.core.services.logs import URLReplacer
@@ -163,7 +168,7 @@ class Run(ABC):
             service_port = 443 if secure else 80
             ports = {
                 **ports,
-                self._run.run_spec.configuration.port.container_port: service_port,
+                get_or_error(get_or_error(self._ssh_attach).service_port): service_port,
             }
             path_prefix = url.path
             replace_urls = URLReplacer(
@@ -338,6 +343,10 @@ class Run(ABC):
             else:
                 container_user = "root"

+            service_port = None
+            if isinstance(self._run.run_spec.configuration, ServiceConfiguration):
+                service_port = get_service_port(job.job_spec, self._run.run_spec.configuration)
+
             self._ssh_attach = SSHAttach(
                 hostname=provisioning_data.hostname,
                 ssh_port=provisioning_data.ssh_port,
@@ -349,6 +358,7 @@ class Run(ABC):
                 run_name=name,
                 dockerized=provisioning_data.dockerized,
                 ssh_proxy=provisioning_data.ssh_proxy,
+                service_port=service_port,
                 local_backend=provisioning_data.backend == BackendType.LOCAL,
                 bind_address=bind_address,
             )
@@ -748,6 +758,7 @@ class RunCollection:
             repo_id=None,
             only_active=only_active,
             limit=limit or 100,
+            # TODO: Pass job_submissions_limit=1 in 0.20
         )
         if only_active and len(runs) == 0:
             runs = self._api_client.runs.list(
dstack/api/server/_runs.py CHANGED

@@ -4,7 +4,11 @@ from uuid import UUID

 from pydantic import parse_obj_as

-from dstack._internal.core.compatibility.runs import get_apply_plan_excludes, get_get_plan_excludes
+from dstack._internal.core.compatibility.runs import (
+    get_apply_plan_excludes,
+    get_get_plan_excludes,
+    get_list_runs_excludes,
+)
 from dstack._internal.core.models.runs import (
     ApplyRunPlanInput,
     Run,
@@ -33,18 +37,24 @@ class RunsAPIClient(APIClientGroup):
         prev_run_id: Optional[UUID] = None,
         limit: int = 100,
         ascending: bool = False,
+        include_jobs: bool = True,
+        job_submissions_limit: Optional[int] = None,
     ) -> List[Run]:
         body = ListRunsRequest(
             project_name=project_name,
             repo_id=repo_id,
             username=username,
             only_active=only_active,
+            include_jobs=include_jobs,
+            job_submissions_limit=job_submissions_limit,
             prev_submitted_at=prev_submitted_at,
             prev_run_id=prev_run_id,
             limit=limit,
             ascending=ascending,
         )
-        resp = self._request("/api/runs/list", body=body.json())
+        resp = self._request(
+            "/api/runs/list", body=body.json(exclude=get_list_runs_excludes(body))
+        )
         return parse_obj_as(List[Run.__response__], resp.json())

     def get(self, project_name: str, run_name: str) -> Run:
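
For reference, a sketch of calling the extended `list()` with the new keyword arguments. How the `RunsAPIClient` instance is obtained (server URL, auth token) is outside this diff and assumed here; the exact server-side semantics of the two new parameters are inferred from their names.

def fetch_recent_runs(runs_client, project_name: str):
    # runs_client is assumed to be an already-configured RunsAPIClient
    # (dstack/api/server/_runs.py). include_jobs and job_submissions_limit
    # are the parameters added in 0.19.19.
    return runs_client.list(
        project_name=project_name,
        repo_id=None,
        username=None,
        only_active=False,
        include_jobs=False,
        job_submissions_limit=1,
        limit=50,
    )
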
dstack/version.py CHANGED

@@ -1,4 +1,4 @@
-__version__ = "0.19.17"
+__version__ = "0.19.19"
 __is_release__ = True
 base_image = "0.10"
 base_image_ubuntu_version = "22.04"
{dstack-0.19.17.dist-info → dstack-0.19.19.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dstack
-Version: 0.19.17
+Version: 0.19.19
 Summary: dstack is an open-source orchestration engine for running AI workloads on any cloud or on-premises.
 Project-URL: Homepage, https://dstack.ai
 Project-URL: Source, https://github.com/dstackai/dstack
@@ -24,6 +24,7 @@ Requires-Dist: gitpython
 Requires-Dist: gpuhunt==0.1.6
 Requires-Dist: ignore-python>=0.2.0
 Requires-Dist: jsonschema
+Requires-Dist: orjson
 Requires-Dist: packaging
 Requires-Dist: paramiko>=3.2.0
 Requires-Dist: psutil
@@ -83,6 +84,7 @@ Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'all'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'all'
 Requires-Dist: starlette>=0.26.0; extra == 'all'
 Requires-Dist: uvicorn; extra == 'all'
+Requires-Dist: uvicorn[standard]; extra == 'all'
 Requires-Dist: watchfiles; extra == 'all'
 Provides-Extra: aws
 Requires-Dist: aiocache; extra == 'aws'
@@ -107,7 +109,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'aws'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'aws'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'aws'
 Requires-Dist: starlette>=0.26.0; extra == 'aws'
-Requires-Dist: uvicorn; extra == 'aws'
+Requires-Dist: uvicorn[standard]; extra == 'aws'
 Requires-Dist: watchfiles; extra == 'aws'
 Provides-Extra: azure
 Requires-Dist: aiocache; extra == 'azure'
@@ -137,7 +139,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'azure'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'azure'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'azure'
 Requires-Dist: starlette>=0.26.0; extra == 'azure'
-Requires-Dist: uvicorn; extra == 'azure'
+Requires-Dist: uvicorn[standard]; extra == 'azure'
 Requires-Dist: watchfiles; extra == 'azure'
 Provides-Extra: datacrunch
 Requires-Dist: aiocache; extra == 'datacrunch'
@@ -161,7 +163,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'datacrunch'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'datacrunch'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'datacrunch'
 Requires-Dist: starlette>=0.26.0; extra == 'datacrunch'
-Requires-Dist: uvicorn; extra == 'datacrunch'
+Requires-Dist: uvicorn[standard]; extra == 'datacrunch'
 Requires-Dist: watchfiles; extra == 'datacrunch'
 Provides-Extra: gateway
 Requires-Dist: aiocache; extra == 'gateway'
@@ -199,7 +201,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'gcp'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'gcp'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'gcp'
 Requires-Dist: starlette>=0.26.0; extra == 'gcp'
-Requires-Dist: uvicorn; extra == 'gcp'
+Requires-Dist: uvicorn[standard]; extra == 'gcp'
 Requires-Dist: watchfiles; extra == 'gcp'
 Provides-Extra: kubernetes
 Requires-Dist: aiocache; extra == 'kubernetes'
@@ -223,7 +225,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'kubernetes'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'kubernetes'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'kubernetes'
 Requires-Dist: starlette>=0.26.0; extra == 'kubernetes'
-Requires-Dist: uvicorn; extra == 'kubernetes'
+Requires-Dist: uvicorn[standard]; extra == 'kubernetes'
 Requires-Dist: watchfiles; extra == 'kubernetes'
 Provides-Extra: lambda
 Requires-Dist: aiocache; extra == 'lambda'
@@ -248,7 +250,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'lambda'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'lambda'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'lambda'
 Requires-Dist: starlette>=0.26.0; extra == 'lambda'
-Requires-Dist: uvicorn; extra == 'lambda'
+Requires-Dist: uvicorn[standard]; extra == 'lambda'
 Requires-Dist: watchfiles; extra == 'lambda'
 Provides-Extra: nebius
 Requires-Dist: aiocache; extra == 'nebius'
@@ -272,7 +274,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'nebius'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'nebius'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'nebius'
 Requires-Dist: starlette>=0.26.0; extra == 'nebius'
-Requires-Dist: uvicorn; extra == 'nebius'
+Requires-Dist: uvicorn[standard]; extra == 'nebius'
 Requires-Dist: watchfiles; extra == 'nebius'
 Provides-Extra: oci
 Requires-Dist: aiocache; extra == 'oci'
@@ -298,7 +300,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'oci'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'oci'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'oci'
 Requires-Dist: starlette>=0.26.0; extra == 'oci'
-Requires-Dist: uvicorn; extra == 'oci'
+Requires-Dist: uvicorn[standard]; extra == 'oci'
 Requires-Dist: watchfiles; extra == 'oci'
 Provides-Extra: server
 Requires-Dist: aiocache; extra == 'server'
@@ -321,7 +323,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'server'
 Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'server'
 Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'server'
 Requires-Dist: starlette>=0.26.0; extra == 'server'
-Requires-Dist: uvicorn; extra == 'server'
+Requires-Dist: uvicorn[standard]; extra == 'server'
 Requires-Dist: watchfiles; extra == 'server'
 Description-Content-Type: text/markdown

@@ -345,12 +347,13 @@ Description-Content-Type: text/markdown
 `dstack` supports `NVIDIA`, `AMD`, `Google TPU`, `Intel Gaudi`, and `Tenstorrent` accelerators out of the box.

 ## Latest news ✨
-
+- [2025/07] [dstack 0.19.17: Secrets, Files, Rolling deployment](https://github.com/dstackai/dstack/releases/tag/0.19.17)
+- [2025/06] [dstack 0.19.16: Docker in Docker, CloudRift](https://github.com/dstackai/dstack/releases/tag/0.19.16)
+- [2025/06] [dstack 0.19.13: InfiniBand support in default images](https://github.com/dstackai/dstack/releases/tag/0.19.13)
+- [2025/06] [dstack 0.19.12: Simplified use of MPI](https://github.com/dstackai/dstack/releases/tag/0.19.12)
+- [2025/05] [dstack 0.19.10: Priorities](https://github.com/dstackai/dstack/releases/tag/0.19.10)
 - [2025/05] [dstack 0.19.8: Nebius clusters, GH200 on Lambda](https://github.com/dstackai/dstack/releases/tag/0.19.8)
 - [2025/04] [dstack 0.19.6: Tenstorrent, Plugins](https://github.com/dstackai/dstack/releases/tag/0.19.6)
-- [2025/04] [dstack 0.19.5: GCP A3 High clusters](https://github.com/dstackai/dstack/releases/tag/0.19.5)
-- [2025/04] [dstack 0.19.3: GCP A3 Mega clusters](https://github.com/dstackai/dstack/releases/tag/0.19.3)
-- [2025/03] [dstack 0.19.0: Prometheus](https://github.com/dstackai/dstack/releases/tag/0.19.0)

 ## How does it work?
