dstack 0.19.16__py3-none-any.whl → 0.19.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dstack might be problematic. Click here for more details.

Files changed (80) hide show
  1. dstack/_internal/cli/commands/secrets.py +92 -0
  2. dstack/_internal/cli/main.py +2 -0
  3. dstack/_internal/cli/services/completion.py +5 -0
  4. dstack/_internal/cli/services/configurators/fleet.py +13 -1
  5. dstack/_internal/cli/services/configurators/run.py +59 -17
  6. dstack/_internal/cli/utils/secrets.py +25 -0
  7. dstack/_internal/core/backends/__init__.py +10 -4
  8. dstack/_internal/core/backends/aws/compute.py +237 -18
  9. dstack/_internal/core/backends/base/compute.py +20 -2
  10. dstack/_internal/core/backends/cudo/compute.py +23 -9
  11. dstack/_internal/core/backends/gcp/compute.py +13 -7
  12. dstack/_internal/core/backends/lambdalabs/compute.py +2 -1
  13. dstack/_internal/core/compatibility/fleets.py +12 -11
  14. dstack/_internal/core/compatibility/gateways.py +9 -8
  15. dstack/_internal/core/compatibility/logs.py +4 -3
  16. dstack/_internal/core/compatibility/runs.py +41 -17
  17. dstack/_internal/core/compatibility/volumes.py +9 -8
  18. dstack/_internal/core/errors.py +4 -0
  19. dstack/_internal/core/models/common.py +7 -0
  20. dstack/_internal/core/models/configurations.py +11 -0
  21. dstack/_internal/core/models/files.py +67 -0
  22. dstack/_internal/core/models/runs.py +14 -0
  23. dstack/_internal/core/models/secrets.py +9 -2
  24. dstack/_internal/core/services/diff.py +36 -3
  25. dstack/_internal/server/app.py +22 -0
  26. dstack/_internal/server/background/__init__.py +61 -37
  27. dstack/_internal/server/background/tasks/process_fleets.py +19 -3
  28. dstack/_internal/server/background/tasks/process_gateways.py +1 -1
  29. dstack/_internal/server/background/tasks/process_instances.py +13 -2
  30. dstack/_internal/server/background/tasks/process_placement_groups.py +4 -2
  31. dstack/_internal/server/background/tasks/process_running_jobs.py +123 -15
  32. dstack/_internal/server/background/tasks/process_runs.py +23 -7
  33. dstack/_internal/server/background/tasks/process_submitted_jobs.py +36 -7
  34. dstack/_internal/server/background/tasks/process_terminating_jobs.py +5 -3
  35. dstack/_internal/server/background/tasks/process_volumes.py +2 -2
  36. dstack/_internal/server/migrations/versions/5f1707c525d2_add_filearchivemodel.py +39 -0
  37. dstack/_internal/server/migrations/versions/644b8a114187_add_secretmodel.py +49 -0
  38. dstack/_internal/server/models.py +33 -0
  39. dstack/_internal/server/routers/files.py +67 -0
  40. dstack/_internal/server/routers/secrets.py +57 -15
  41. dstack/_internal/server/schemas/files.py +5 -0
  42. dstack/_internal/server/schemas/runner.py +2 -0
  43. dstack/_internal/server/schemas/secrets.py +7 -11
  44. dstack/_internal/server/services/backends/__init__.py +1 -1
  45. dstack/_internal/server/services/files.py +91 -0
  46. dstack/_internal/server/services/fleets.py +5 -4
  47. dstack/_internal/server/services/gateways/__init__.py +4 -2
  48. dstack/_internal/server/services/jobs/__init__.py +19 -8
  49. dstack/_internal/server/services/jobs/configurators/base.py +25 -3
  50. dstack/_internal/server/services/jobs/configurators/dev.py +3 -3
  51. dstack/_internal/server/services/locking.py +101 -12
  52. dstack/_internal/server/services/proxy/repo.py +3 -0
  53. dstack/_internal/server/services/runner/client.py +8 -0
  54. dstack/_internal/server/services/runs.py +76 -47
  55. dstack/_internal/server/services/secrets.py +204 -0
  56. dstack/_internal/server/services/storage/base.py +21 -0
  57. dstack/_internal/server/services/storage/gcs.py +28 -6
  58. dstack/_internal/server/services/storage/s3.py +27 -9
  59. dstack/_internal/server/services/volumes.py +2 -2
  60. dstack/_internal/server/settings.py +19 -5
  61. dstack/_internal/server/statics/index.html +1 -1
  62. dstack/_internal/server/statics/{main-a4eafa74304e587d037c.js → main-d1ac2e8c38ed5f08a114.js} +86 -34
  63. dstack/_internal/server/statics/{main-a4eafa74304e587d037c.js.map → main-d1ac2e8c38ed5f08a114.js.map} +1 -1
  64. dstack/_internal/server/statics/{main-f53d6d0d42f8d61df1de.css → main-d58fc0460cb0eae7cb5c.css} +1 -1
  65. dstack/_internal/server/statics/static/media/google.b194b06fafd0a52aeb566922160ea514.svg +1 -0
  66. dstack/_internal/server/testing/common.py +50 -8
  67. dstack/_internal/settings.py +4 -0
  68. dstack/_internal/utils/files.py +69 -0
  69. dstack/_internal/utils/nested_list.py +47 -0
  70. dstack/_internal/utils/path.py +12 -4
  71. dstack/api/_public/runs.py +67 -7
  72. dstack/api/server/__init__.py +6 -0
  73. dstack/api/server/_files.py +18 -0
  74. dstack/api/server/_secrets.py +15 -15
  75. dstack/version.py +1 -1
  76. {dstack-0.19.16.dist-info → dstack-0.19.18.dist-info}/METADATA +13 -13
  77. {dstack-0.19.16.dist-info → dstack-0.19.18.dist-info}/RECORD +80 -67
  78. {dstack-0.19.16.dist-info → dstack-0.19.18.dist-info}/WHEEL +0 -0
  79. {dstack-0.19.16.dist-info → dstack-0.19.18.dist-info}/entry_points.txt +0 -0
  80. {dstack-0.19.16.dist-info → dstack-0.19.18.dist-info}/licenses/LICENSE.md +0 -0
@@ -0,0 +1 @@
1
+ <svg xmlns="http://www.w3.org/2000/svg" height="14" viewBox="0 0 24 24" width="14"><path d="M22.56 12.25c0-.78-.07-1.53-.2-2.25H12v4.26h5.92c-.26 1.37-1.04 2.53-2.21 3.31v2.77h3.57c2.08-1.92 3.28-4.74 3.28-8.09z" fill="#4285F4"/><path d="M12 23c2.97 0 5.46-.98 7.28-2.66l-3.57-2.77c-.98.66-2.23 1.06-3.71 1.06-2.86 0-5.29-1.93-6.16-4.53H2.18v2.84C3.99 20.53 7.7 23 12 23z" fill="#34A853"/><path d="M5.84 14.09c-.22-.66-.35-1.36-.35-2.09s.13-1.43.35-2.09V7.07H2.18C1.43 8.55 1 10.22 1 12s.43 3.45 1.18 4.93l2.85-2.22.81-.62z" fill="#FBBC05"/><path d="M12 5.38c1.62 0 3.06.56 4.21 1.64l3.15-3.15C17.45 2.09 14.97 1 12 1 7.7 1 3.99 3.47 2.18 7.07l3.66 2.84c.87-2.6 3.3-4.53 6.16-4.53z" fill="#EA4335"/><path d="M1 1h22v22H1z" fill="none"/></svg>
@@ -1,5 +1,6 @@
1
1
  import json
2
2
  import uuid
3
+ from collections.abc import Callable
3
4
  from contextlib import contextmanager
4
5
  from datetime import datetime, timezone
5
6
  from typing import Dict, List, Literal, Optional, Union
@@ -77,6 +78,7 @@ from dstack._internal.core.models.volumes import (
77
78
  from dstack._internal.server.models import (
78
79
  BackendModel,
79
80
  DecryptedString,
81
+ FileArchiveModel,
80
82
  FleetModel,
81
83
  GatewayComputeModel,
82
84
  GatewayModel,
@@ -89,6 +91,7 @@ from dstack._internal.server.models import (
89
91
  RepoCredsModel,
90
92
  RepoModel,
91
93
  RunModel,
94
+ SecretModel,
92
95
  UserModel,
93
96
  VolumeAttachmentModel,
94
97
  VolumeModel,
@@ -232,21 +235,38 @@ async def create_repo_creds(
232
235
  return repo_creds
233
236
 
234
237
 
238
async def create_file_archive(
    session: AsyncSession,
    user_id: UUID,
    blob_hash: str = "blob_hash",
    blob: bytes = b"blob_content",
) -> FileArchiveModel:
    """Create and persist a FileArchiveModel for tests; returns the committed model."""
    model = FileArchiveModel(user_id=user_id, blob_hash=blob_hash, blob=blob)
    session.add(model)
    await session.commit()
    return model
252
+
253
+
235
254
  def get_run_spec(
236
255
  run_name: str,
237
256
  repo_id: str,
238
- profile: Optional[Profile] = None,
257
+ configuration_path: str = "dstack.yaml",
258
+ profile: Union[Profile, Callable[[], Profile], None] = lambda: Profile(name="default"),
239
259
  configuration: Optional[AnyRunConfiguration] = None,
240
260
  ) -> RunSpec:
241
- if profile is None:
242
- profile = Profile(name="default")
261
+ if callable(profile):
262
+ profile = profile()
243
263
  return RunSpec(
244
264
  run_name=run_name,
245
265
  repo_id=repo_id,
246
266
  repo_data=LocalRunRepoData(repo_dir="/"),
247
267
  repo_code_hash=None,
248
268
  working_dir=".",
249
- configuration_path="dstack.yaml",
269
+ configuration_path=configuration_path,
250
270
  configuration=configuration or DevEnvironmentConfiguration(ide="vscode"),
251
271
  profile=profile,
252
272
  ssh_key_pub="user_ssh_key",
@@ -315,7 +335,9 @@ async def create_job(
315
335
  if deployment_num is None:
316
336
  deployment_num = run.deployment_num
317
337
  run_spec = RunSpec.parse_raw(run.run_spec)
318
- job_spec = (await get_job_specs_from_run_spec(run_spec, replica_num=replica_num))[0]
338
+ job_spec = (
339
+ await get_job_specs_from_run_spec(run_spec=run_spec, secrets={}, replica_num=replica_num)
340
+ )[0]
319
341
  job_spec.job_num = job_num
320
342
  job = JobModel(
321
343
  project_id=run.project_id,
@@ -480,6 +502,7 @@ async def create_fleet(
480
502
  status: FleetStatus = FleetStatus.ACTIVE,
481
503
  deleted: bool = False,
482
504
  name: Optional[str] = None,
505
+ last_processed_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
483
506
  ) -> FleetModel:
484
507
  if fleet_id is None:
485
508
  fleet_id = uuid.uuid4()
@@ -497,6 +520,7 @@ async def create_fleet(
497
520
  spec=spec.json(),
498
521
  instances=[],
499
522
  runs=[],
523
+ last_processed_at=last_processed_at,
500
524
  )
501
525
  session.add(fm)
502
526
  await session.commit()
@@ -540,10 +564,10 @@ async def create_instance(
540
564
  instance_id: Optional[UUID] = None,
541
565
  job: Optional[JobModel] = None,
542
566
  instance_num: int = 0,
543
- backend: Optional[BackendType] = BackendType.DATACRUNCH,
567
+ backend: BackendType = BackendType.DATACRUNCH,
544
568
  termination_policy: Optional[TerminationPolicy] = None,
545
569
  termination_idle_time: int = DEFAULT_FLEET_TERMINATION_IDLE_TIME,
546
- region: Optional[str] = "eu-west",
570
+ region: str = "eu-west",
547
571
  remote_connection_info: Optional[RemoteConnectionInfo] = None,
548
572
  offer: Optional[Union[InstanceOfferWithAvailability, Literal["auto"]]] = "auto",
549
573
  job_provisioning_data: Optional[Union[JobProvisioningData, Literal["auto"]]] = "auto",
@@ -552,6 +576,7 @@ async def create_instance(
552
576
  name: str = "test_instance",
553
577
  volumes: Optional[List[VolumeModel]] = None,
554
578
  price: float = 1.0,
579
+ last_processed_at: datetime = datetime(2023, 1, 2, 3, 4, tzinfo=timezone.utc),
555
580
  ) -> InstanceModel:
556
581
  if instance_id is None:
557
582
  instance_id = uuid.uuid4()
@@ -588,6 +613,7 @@ async def create_instance(
588
613
  fleet=fleet,
589
614
  project=project,
590
615
  status=status,
616
+ last_processed_at=last_processed_at,
591
617
  unreachable=unreachable,
592
618
  created_at=created_at,
593
619
  started_at=created_at,
@@ -917,6 +943,22 @@ async def create_job_prometheus_metrics(
917
943
  return metrics
918
944
 
919
945
 
946
async def create_secret(
    session: AsyncSession,
    project: ProjectModel,
    name: str = "test_secret",
    value: str = "test_value",
) -> SecretModel:
    """
    Create and persist a SecretModel for tests; returns the committed model.

    The return annotation and the name/value defaults bring this helper in line
    with the other factory helpers in this module (e.g. `create_file_archive`).
    """
    secret_model = SecretModel(
        project=project,
        name=name,
        # The plaintext is wrapped so the model stores it via the encryption layer.
        value=DecryptedString(plaintext=value),
    )
    session.add(secret_model)
    await session.commit()
    return secret_model
960
+
961
+
920
962
  def get_private_key_string() -> str:
921
963
  return """
922
964
  -----BEGIN RSA PRIVATE KEY-----
@@ -1002,7 +1044,7 @@ class ComputeMockSpec(
1002
1044
  ComputeWithVolumeSupport,
1003
1045
  ):
1004
1046
  """
1005
- Can be used to create Compute mocks that pass all isinstance asserts.
1047
+ Can be used to create Compute mocks that pass all `isinstance()` asserts.
1006
1048
  """
1007
1049
 
1008
1050
  pass
@@ -19,6 +19,10 @@ DSTACK_BASE_IMAGE_UBUNTU_VERSION = os.getenv(
19
19
  )
20
20
  DSTACK_DIND_IMAGE = os.getenv("DSTACK_DIND_IMAGE", "dstackai/dind")
21
21
 
22
# Development settings

# Enabled by mere presence of the variable: any value (even an empty string)
# turns the local backend on; only an unset variable disables it.
LOCAL_BACKEND_ENABLED = os.getenv("DSTACK_LOCAL_BACKEND_ENABLED") is not None
25
+
22
26
 
23
27
  class FeatureFlags:
24
28
  """
@@ -0,0 +1,69 @@
1
+ import tarfile
2
+ from pathlib import Path
3
+ from typing import BinaryIO
4
+
5
+ import ignore
6
+ import ignore.overrides
7
+
8
+ from dstack._internal.utils.hash import get_sha256
9
+ from dstack._internal.utils.path import PathLike, normalize_path
10
+
11
+
12
def create_file_archive(root: PathLike, fp: BinaryIO) -> str:
    """
    Packs the directory or file to a tar archive and writes it to the file-like object.

    Archives can be used to transfer file(s) (e.g., over the network) preserving
    file properties such as permissions, timestamps, etc.

    NOTE: `.gitignore` and `.dstackignore` are respected.

    Args:
        root: The absolute path to the directory or file.
        fp: The binary file-like object.

    Returns:
        The SHA-256 hash of the archive as a hex string.

    Raises:
        ValueError: If the path is not absolute.
        OSError: Underlying errors from the tarfile module
    """
    root = Path(root)
    if not root.is_absolute():
        raise ValueError(f"path must be absolute: {root}")
    # Build an ignore-aware directory walker. The override for "/.git/" presumably
    # keeps the .git directory out of the archive (the `!` prefix inverts the rule
    # in the ignore library's override semantics) — verify against the library docs.
    walk = (
        ignore.WalkBuilder(root)
        .overrides(ignore.overrides.OverrideBuilder(root).add("!/.git/").build())
        .hidden(False)  # do not ignore files that start with a dot
        .require_git(False)  # respect git ignore rules even if not a git repo
        .add_custom_ignore_filename(".dstackignore")
        .build()
    )
    # sort paths to ensure archive reproducibility
    paths = sorted(entry.path() for entry in walk)
    with tarfile.TarFile(mode="w", fileobj=fp) as t:
        for path in paths:
            # Entries are stored relative to root's parent, so the archive's
            # top-level entry is `root` itself.
            arcname = str(path.relative_to(root.parent))
            info = t.gettarinfo(path, arcname)
            if info.issym():
                # Symlinks are handled as follows: each symlink in the chain is checked, and
                # * if the target is inside the root: keep relative links as is, replace absolute
                # links with relative ones;
                # * if the target is outside the root: replace the link with the actual file.
                target = Path(info.linkname)
                if not target.is_absolute():
                    # Resolve the link target relative to the symlink's directory.
                    target = path.parent / target
                target = normalize_path(target)
                try:
                    target.relative_to(root)
                except ValueError:
                    # Target escapes root -> adding as a file (dereference the link)
                    t.add(path.resolve(), arcname, recursive=False)
                else:
                    # Adding as a relative symlink.
                    # NOTE: `relative_to(..., walk_up=True)` requires Python >= 3.12.
                    info.linkname = str(target.relative_to(path.parent, walk_up=True))
                    t.addfile(info)
            else:
                t.add(path, arcname, recursive=False)
    # NOTE(review): fp is positioned at EOF here; assumes get_sha256 manages the
    # file position itself. Callers must fp.seek(0) before reusing fp — confirm.
    return get_sha256(fp)
@@ -0,0 +1,47 @@
1
+ from dataclasses import dataclass, field
2
+ from typing import Optional
3
+
4
+
5
@dataclass
class NestedListItem:
    # One bullet point; `children` holds the nested sub-bullets.
    label: str
    children: list["NestedListItem"] = field(default_factory=list)

    def render(self, indent: int = 0, visited: Optional[set[int]] = None) -> str:
        """
        Render this item and its subtree as indented "- label" lines.

        Raises:
            ValueError: If the same item reappears along a single root-to-leaf path.
        """
        seen = set() if visited is None else visited
        if id(self) in seen:
            raise ValueError(f"Cycle detected at item: {self.label}")
        seen.add(id(self))
        pieces = [" " * indent + "- " + self.label + "\n"]
        # Hand each branch its own copy of `seen` so cycles are only detected
        # along one path — the same item may legitimately appear in unrelated paths.
        pieces.extend(child.render(indent + 1, seen.copy()) for child in self.children)
        return "".join(pieces)


@dataclass
class NestedList:
    """
    A nested list that can be rendered in Markdown-like format:

    - Item 1
    - Item 2
     - Item 2.1
     - Item 2.2
      - Item 2.2.1
    - Item 3
    """

    children: list[NestedListItem] = field(default_factory=list)

    def render(self) -> str:
        """Render every top-level item in order."""
        return "".join(item.render() for item in self.children)
@@ -27,16 +27,24 @@ def path_in_dir(path: PathLike, directory: PathLike) -> bool:
27
27
  return False
28
28
 
29
29
 
30
def normalize_path(path: PathLike) -> PurePath:
    """
    Collapse `..` components of `path` purely lexically (no filesystem access).

    Raises:
        ValueError: If a `..` component would climb above the top directory.
    """
    components: list = []
    for component in PurePath(path).parts:
        if component != "..":
            components.append(component)
        elif components:
            components.pop()
        else:
            raise ValueError("Path is outside of the top directory")
    return PurePath(*components)


def resolve_relative_path(path: PathLike) -> PurePath:
    """
    Normalize a relative path, rejecting absolute paths and paths that
    escape the repo root.

    Raises:
        ValueError: If the path is absolute or escapes upward.
    """
    pure = PurePath(path)
    if pure.is_absolute():
        raise ValueError("Path should be relative")
    try:
        return normalize_path(pure)
    except ValueError:
        raise ValueError("Path is outside of the repo")
@@ -4,10 +4,12 @@ import tempfile
4
4
  import threading
5
5
  import time
6
6
  from abc import ABC
7
+ from collections.abc import Iterator
8
+ from contextlib import contextmanager
7
9
  from copy import copy
8
10
  from datetime import datetime
9
11
  from pathlib import Path
10
- from typing import Dict, Iterable, List, Optional, Union
12
+ from typing import BinaryIO, Dict, Iterable, List, Optional, Union
11
13
  from urllib.parse import urlparse
12
14
 
13
15
  from websocket import WebSocketApp
@@ -17,6 +19,7 @@ from dstack._internal.core.consts import DSTACK_RUNNER_HTTP_PORT, DSTACK_RUNNER_
17
19
  from dstack._internal.core.errors import ClientError, ConfigurationError, ResourceNotExistsError
18
20
  from dstack._internal.core.models.backends.base import BackendType
19
21
  from dstack._internal.core.models.configurations import AnyRunConfiguration, PortMapping
22
+ from dstack._internal.core.models.files import FileArchiveMapping, FilePathMapping
20
23
  from dstack._internal.core.models.profiles import (
21
24
  CreationPolicy,
22
25
  Profile,
@@ -42,6 +45,7 @@ from dstack._internal.core.services.ssh.attach import SSHAttach
42
45
  from dstack._internal.core.services.ssh.ports import PortsLock
43
46
  from dstack._internal.server.schemas.logs import PollLogsRequest
44
47
  from dstack._internal.utils.common import get_or_error, make_proxy_url
48
+ from dstack._internal.utils.files import create_file_archive
45
49
  from dstack._internal.utils.logging import get_logger
46
50
  from dstack._internal.utils.path import PathLike, path_in_dir
47
51
  from dstack.api.server import APIClient
@@ -436,12 +440,16 @@ class RunCollection:
436
440
  """
437
441
  if repo is None:
438
442
  repo = VirtualRepo()
443
+ repo_code_hash = None
444
+ else:
445
+ with _prepare_code_file(repo) as (_, repo_code_hash):
446
+ pass
439
447
 
440
448
  run_spec = RunSpec(
441
449
  run_name=configuration.name,
442
450
  repo_id=repo.repo_id,
443
451
  repo_data=repo.run_repo_data,
444
- repo_code_hash=None, # `apply_plan` will fill it
452
+ repo_code_hash=repo_code_hash,
445
453
  working_dir=configuration.working_dir,
446
454
  configuration_path=configuration_path,
447
455
  configuration=configuration,
@@ -476,20 +484,36 @@ class RunCollection:
476
484
  # TODO handle multiple jobs
477
485
  ports_lock = _reserve_ports(run_plan.job_plans[0].job_spec)
478
486
 
487
+ run_spec = run_plan.run_spec
488
+ configuration = run_spec.configuration
489
+
490
+ self._validate_configuration_files(configuration, run_spec.configuration_path)
491
+ for file_mapping in configuration.files:
492
+ assert isinstance(file_mapping, FilePathMapping)
493
+ with tempfile.TemporaryFile("w+b") as fp:
494
+ try:
495
+ archive_hash = create_file_archive(file_mapping.local_path, fp)
496
+ except OSError as e:
497
+ raise ClientError(f"failed to archive '{file_mapping.local_path}': {e}") from e
498
+ fp.seek(0)
499
+ archive = self._api_client.files.upload_archive(hash=archive_hash, fp=fp)
500
+ run_spec.file_archives.append(
501
+ FileArchiveMapping(id=archive.id, path=file_mapping.path)
502
+ )
503
+
479
504
  if repo is None:
480
505
  repo = VirtualRepo()
481
506
  else:
482
507
  # Do not upload the diff without a repo (a default virtual repo)
483
508
  # since upload_code() requires a repo to be initialized.
484
- with tempfile.TemporaryFile("w+b") as fp:
485
- run_plan.run_spec.repo_code_hash = repo.write_code_file(fp)
486
- fp.seek(0)
509
+ with _prepare_code_file(repo) as (fp, repo_code_hash):
487
510
  self._api_client.repos.upload_code(
488
511
  project_name=self._project,
489
512
  repo_id=repo.repo_id,
490
- code_hash=run_plan.run_spec.repo_code_hash,
513
+ code_hash=repo_code_hash,
491
514
  fp=fp,
492
515
  )
516
+
493
517
  run = self._api_client.runs.apply_plan(self._project, run_plan)
494
518
  return self._model_to_submitted_run(run, ports_lock)
495
519
 
@@ -627,6 +651,10 @@ class RunCollection:
627
651
  logger.warning("The get_plan() method is deprecated in favor of get_run_plan().")
628
652
  if repo is None:
629
653
  repo = VirtualRepo()
654
+ repo_code_hash = None
655
+ else:
656
+ with _prepare_code_file(repo) as (_, repo_code_hash):
657
+ pass
630
658
 
631
659
  if working_dir is None:
632
660
  working_dir = "."
@@ -663,7 +691,7 @@ class RunCollection:
663
691
  run_name=run_name,
664
692
  repo_id=repo.repo_id,
665
693
  repo_data=repo.run_repo_data,
666
- repo_code_hash=None, # `exec_plan` will fill it
694
+ repo_code_hash=repo_code_hash,
667
695
  working_dir=working_dir,
668
696
  configuration_path=configuration_path,
669
697
  configuration=configuration,
@@ -762,6 +790,30 @@ class RunCollection:
762
790
  ports_lock,
763
791
  )
764
792
 
793
+ def _validate_configuration_files(
794
+ self, configuration: AnyRunConfiguration, configuration_path: Optional[PathLike]
795
+ ) -> None:
796
+ """
797
+ Expands, normalizes and validates local paths specified in
798
+ the `files` configuration property.
799
+ """
800
+ base_dir: Optional[Path] = None
801
+ if configuration_path is not None:
802
+ base_dir = Path(configuration_path).expanduser().resolve().parent
803
+ for file_mapping in configuration.files:
804
+ assert isinstance(file_mapping, FilePathMapping)
805
+ path = Path(file_mapping.local_path).expanduser()
806
+ if not path.is_absolute():
807
+ if base_dir is None:
808
+ raise ConfigurationError(
809
+ f"Path '{path}' is relative but `configuration_path` is not provided"
810
+ )
811
+ else:
812
+ path = base_dir / path
813
+ if not path.exists():
814
+ raise ConfigurationError(f"Path '{path}' specified in `files` does not exist")
815
+ file_mapping.local_path = str(path)
816
+
765
817
 
766
818
  def _reserve_ports(
767
819
  job_spec: JobSpec,
@@ -781,3 +833,11 @@ def _reserve_ports(
781
833
  ports[port_override.container_port] = port_override.local_port or 0
782
834
  logger.debug("Reserving ports: %s", ports)
783
835
  return PortsLock(ports).acquire()
836
+
837
+
838
@contextmanager
def _prepare_code_file(repo: Repo) -> Iterator[tuple[BinaryIO, str]]:
    """
    Yield a temporary file holding the repo's code archive, rewound to the
    start, together with the code hash returned by `repo.write_code_file`.
    The file is removed when the context exits.
    """
    with tempfile.TemporaryFile("w+b") as fp:
        code_hash = repo.write_code_file(fp)
        fp.seek(0)  # rewind so consumers can read the archive from the beginning
        yield fp, code_hash
@@ -14,6 +14,7 @@ from dstack._internal.core.errors import (
14
14
  )
15
15
  from dstack._internal.utils.logging import get_logger
16
16
  from dstack.api.server._backends import BackendsAPIClient
17
+ from dstack.api.server._files import FilesAPIClient
17
18
  from dstack.api.server._fleets import FleetsAPIClient
18
19
  from dstack.api.server._gateways import GatewaysAPIClient
19
20
  from dstack.api.server._logs import LogsAPIClient
@@ -47,6 +48,7 @@ class APIClient:
47
48
  logs: operations with logs
48
49
  gateways: operations with gateways
49
50
  volumes: operations with volumes
51
+ files: operations with files
50
52
  """
51
53
 
52
54
  def __init__(self, base_url: str, token: str):
@@ -111,6 +113,10 @@ class APIClient:
111
113
  def volumes(self) -> VolumesAPIClient:
112
114
  return VolumesAPIClient(self._request)
113
115
 
116
+ @property
117
+ def files(self) -> FilesAPIClient:
118
+ return FilesAPIClient(self._request)
119
+
114
120
  def _request(
115
121
  self,
116
122
  path: str,
@@ -0,0 +1,18 @@
1
+ from typing import BinaryIO
2
+
3
+ from pydantic import parse_obj_as
4
+
5
+ from dstack._internal.core.models.files import FileArchive
6
+ from dstack._internal.server.schemas.files import GetFileArchiveByHashRequest
7
+ from dstack.api.server._group import APIClientGroup
8
+
9
+
10
class FilesAPIClient(APIClientGroup):
    """Client for the server's file-archive endpoints."""

    def get_archive_by_hash(self, hash: str) -> FileArchive:
        """Look up a previously uploaded archive by its content hash."""
        request_body = GetFileArchiveByHashRequest(hash=hash)
        resp = self._request("/api/files/get_archive_by_hash", body=request_body.json())
        return parse_obj_as(FileArchive.__response__, resp.json())

    def upload_archive(self, hash: str, fp: BinaryIO) -> FileArchive:
        """Upload an archive as multipart data; the hash is sent as the file name."""
        resp = self._request("/api/files/upload_archive", files={"file": (hash, fp)})
        return parse_obj_as(FileArchive.__response__, resp.json())
@@ -4,33 +4,33 @@ from pydantic import parse_obj_as
4
4
 
5
5
  from dstack._internal.core.models.secrets import Secret
6
6
  from dstack._internal.server.schemas.secrets import (
7
- AddSecretRequest,
7
+ CreateOrUpdateSecretRequest,
8
8
  DeleteSecretsRequest,
9
- GetSecretsRequest,
10
- ListSecretsRequest,
9
+ GetSecretRequest,
11
10
  )
12
11
  from dstack.api.server._group import APIClientGroup
13
12
 
14
13
 
15
14
class SecretsAPIClient(APIClientGroup):
    """Client for the project-scoped secrets endpoints."""

    def list(self, project_name: str) -> List[Secret]:
        """Return all secrets of the project."""
        resp = self._request(f"/api/project/{project_name}/secrets/list")
        return parse_obj_as(List[Secret.__response__], resp.json())

    def get(self, project_name: str, name: str) -> Secret:
        """Return a single secret by name."""
        request_body = GetSecretRequest(name=name)
        resp = self._request(f"/api/project/{project_name}/secrets/get", body=request_body.json())
        # NOTE(review): parses into Secret, while the sibling methods use
        # Secret.__response__ — confirm whether this asymmetry is intended.
        return parse_obj_as(Secret, resp.json())

    def create_or_update(self, project_name: str, name: str, value: str) -> Secret:
        """Create the secret, or overwrite its value if it already exists."""
        request_body = CreateOrUpdateSecretRequest(
            name=name,
            value=value,
        )
        resp = self._request(
            f"/api/project/{project_name}/secrets/create_or_update", body=request_body.json()
        )
        return parse_obj_as(Secret.__response__, resp.json())

    def delete(self, project_name: str, names: List[str]):
        """Delete the named secrets from the project."""
        request_body = DeleteSecretsRequest(secrets_names=names)
        self._request(f"/api/project/{project_name}/secrets/delete", body=request_body.json())
dstack/version.py CHANGED
@@ -1,4 +1,4 @@
1
# Package version string; bumped on every release.
__version__ = "0.19.18"
# NOTE(review): presumably distinguishes tagged release builds from dev builds — confirm.
__is_release__ = True
# NOTE(review): looks like the tag of the dstack base image and its Ubuntu
# version used when provisioning runs — verify against the image build config.
base_image = "0.10"
base_image_ubuntu_version = "22.04"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dstack
3
- Version: 0.19.16
3
+ Version: 0.19.18
4
4
  Summary: dstack is an open-source orchestration engine for running AI workloads on any cloud or on-premises.
5
5
  Project-URL: Homepage, https://dstack.ai
6
6
  Project-URL: Source, https://github.com/dstackai/dstack
@@ -83,6 +83,7 @@ Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'all'
83
83
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'all'
84
84
  Requires-Dist: starlette>=0.26.0; extra == 'all'
85
85
  Requires-Dist: uvicorn; extra == 'all'
86
+ Requires-Dist: uvicorn[standard]; extra == 'all'
86
87
  Requires-Dist: watchfiles; extra == 'all'
87
88
  Provides-Extra: aws
88
89
  Requires-Dist: aiocache; extra == 'aws'
@@ -107,7 +108,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'aws'
107
108
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'aws'
108
109
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'aws'
109
110
  Requires-Dist: starlette>=0.26.0; extra == 'aws'
110
- Requires-Dist: uvicorn; extra == 'aws'
111
+ Requires-Dist: uvicorn[standard]; extra == 'aws'
111
112
  Requires-Dist: watchfiles; extra == 'aws'
112
113
  Provides-Extra: azure
113
114
  Requires-Dist: aiocache; extra == 'azure'
@@ -137,7 +138,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'azure'
137
138
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'azure'
138
139
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'azure'
139
140
  Requires-Dist: starlette>=0.26.0; extra == 'azure'
140
- Requires-Dist: uvicorn; extra == 'azure'
141
+ Requires-Dist: uvicorn[standard]; extra == 'azure'
141
142
  Requires-Dist: watchfiles; extra == 'azure'
142
143
  Provides-Extra: datacrunch
143
144
  Requires-Dist: aiocache; extra == 'datacrunch'
@@ -161,7 +162,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'datacrunch'
161
162
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'datacrunch'
162
163
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'datacrunch'
163
164
  Requires-Dist: starlette>=0.26.0; extra == 'datacrunch'
164
- Requires-Dist: uvicorn; extra == 'datacrunch'
165
+ Requires-Dist: uvicorn[standard]; extra == 'datacrunch'
165
166
  Requires-Dist: watchfiles; extra == 'datacrunch'
166
167
  Provides-Extra: gateway
167
168
  Requires-Dist: aiocache; extra == 'gateway'
@@ -199,7 +200,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'gcp'
199
200
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'gcp'
200
201
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'gcp'
201
202
  Requires-Dist: starlette>=0.26.0; extra == 'gcp'
202
- Requires-Dist: uvicorn; extra == 'gcp'
203
+ Requires-Dist: uvicorn[standard]; extra == 'gcp'
203
204
  Requires-Dist: watchfiles; extra == 'gcp'
204
205
  Provides-Extra: kubernetes
205
206
  Requires-Dist: aiocache; extra == 'kubernetes'
@@ -223,7 +224,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'kubernetes'
223
224
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'kubernetes'
224
225
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'kubernetes'
225
226
  Requires-Dist: starlette>=0.26.0; extra == 'kubernetes'
226
- Requires-Dist: uvicorn; extra == 'kubernetes'
227
+ Requires-Dist: uvicorn[standard]; extra == 'kubernetes'
227
228
  Requires-Dist: watchfiles; extra == 'kubernetes'
228
229
  Provides-Extra: lambda
229
230
  Requires-Dist: aiocache; extra == 'lambda'
@@ -248,7 +249,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'lambda'
248
249
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'lambda'
249
250
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'lambda'
250
251
  Requires-Dist: starlette>=0.26.0; extra == 'lambda'
251
- Requires-Dist: uvicorn; extra == 'lambda'
252
+ Requires-Dist: uvicorn[standard]; extra == 'lambda'
252
253
  Requires-Dist: watchfiles; extra == 'lambda'
253
254
  Provides-Extra: nebius
254
255
  Requires-Dist: aiocache; extra == 'nebius'
@@ -272,7 +273,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'nebius'
272
273
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'nebius'
273
274
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'nebius'
274
275
  Requires-Dist: starlette>=0.26.0; extra == 'nebius'
275
- Requires-Dist: uvicorn; extra == 'nebius'
276
+ Requires-Dist: uvicorn[standard]; extra == 'nebius'
276
277
  Requires-Dist: watchfiles; extra == 'nebius'
277
278
  Provides-Extra: oci
278
279
  Requires-Dist: aiocache; extra == 'oci'
@@ -298,7 +299,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'oci'
298
299
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'oci'
299
300
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'oci'
300
301
  Requires-Dist: starlette>=0.26.0; extra == 'oci'
301
- Requires-Dist: uvicorn; extra == 'oci'
302
+ Requires-Dist: uvicorn[standard]; extra == 'oci'
302
303
  Requires-Dist: watchfiles; extra == 'oci'
303
304
  Provides-Extra: server
304
305
  Requires-Dist: aiocache; extra == 'server'
@@ -321,7 +322,7 @@ Requires-Dist: sentry-sdk[fastapi]; extra == 'server'
321
322
  Requires-Dist: sqlalchemy-utils>=0.40.0; extra == 'server'
322
323
  Requires-Dist: sqlalchemy[asyncio]>=2.0.0; extra == 'server'
323
324
  Requires-Dist: starlette>=0.26.0; extra == 'server'
324
- Requires-Dist: uvicorn; extra == 'server'
325
+ Requires-Dist: uvicorn[standard]; extra == 'server'
325
326
  Requires-Dist: watchfiles; extra == 'server'
326
327
  Description-Content-Type: text/markdown
327
328
 
@@ -338,8 +339,7 @@ Description-Content-Type: text/markdown
338
339
 
339
340
  </div>
340
341
 
341
- `dstack` is an open-source alternative to Kubernetes and Slurm, designed to simplify GPU allocation and AI workload
342
- orchestration for ML teams across top clouds and on-prem clusters.
342
+ `dstack` is an open-source container orchestrator that simplifies workload orchestration and drives GPU utilization for ML teams. It works with any GPU cloud, on-prem cluster, or accelerated hardware.
343
343
 
344
344
  #### Accelerators
345
345
 
@@ -355,7 +355,7 @@ orchestration for ML teams across top clouds and on-prem clusters.
355
355
 
356
356
  ## How does it work?
357
357
 
358
- <img src="https://dstack.ai/static-assets/static-assets/images/dstack-architecture-diagram-v8.svg" width="750" />
358
+ <img src="https://dstack.ai/static-assets/static-assets/images/dstack-architecture-diagram-v10.svg" width="750" />
359
359
 
360
360
  ### Installation
361
361