dstack 0.19.15rc1__py3-none-any.whl → 0.19.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dstack might be problematic.

Files changed (93)
  1. dstack/_internal/cli/commands/secrets.py +92 -0
  2. dstack/_internal/cli/main.py +2 -0
  3. dstack/_internal/cli/services/completion.py +5 -0
  4. dstack/_internal/cli/services/configurators/run.py +59 -17
  5. dstack/_internal/cli/utils/secrets.py +25 -0
  6. dstack/_internal/core/backends/__init__.py +10 -4
  7. dstack/_internal/core/backends/cloudrift/__init__.py +0 -0
  8. dstack/_internal/core/backends/cloudrift/api_client.py +208 -0
  9. dstack/_internal/core/backends/cloudrift/backend.py +16 -0
  10. dstack/_internal/core/backends/cloudrift/compute.py +138 -0
  11. dstack/_internal/core/backends/cloudrift/configurator.py +66 -0
  12. dstack/_internal/core/backends/cloudrift/models.py +40 -0
  13. dstack/_internal/core/backends/configurators.py +9 -0
  14. dstack/_internal/core/backends/models.py +7 -0
  15. dstack/_internal/core/compatibility/logs.py +15 -0
  16. dstack/_internal/core/compatibility/runs.py +31 -2
  17. dstack/_internal/core/models/backends/base.py +2 -0
  18. dstack/_internal/core/models/configurations.py +33 -2
  19. dstack/_internal/core/models/files.py +67 -0
  20. dstack/_internal/core/models/logs.py +2 -1
  21. dstack/_internal/core/models/runs.py +24 -1
  22. dstack/_internal/core/models/secrets.py +9 -2
  23. dstack/_internal/server/app.py +2 -0
  24. dstack/_internal/server/background/tasks/process_fleets.py +1 -1
  25. dstack/_internal/server/background/tasks/process_gateways.py +1 -1
  26. dstack/_internal/server/background/tasks/process_instances.py +1 -1
  27. dstack/_internal/server/background/tasks/process_placement_groups.py +1 -1
  28. dstack/_internal/server/background/tasks/process_running_jobs.py +110 -13
  29. dstack/_internal/server/background/tasks/process_runs.py +36 -5
  30. dstack/_internal/server/background/tasks/process_submitted_jobs.py +10 -4
  31. dstack/_internal/server/background/tasks/process_terminating_jobs.py +2 -2
  32. dstack/_internal/server/background/tasks/process_volumes.py +1 -1
  33. dstack/_internal/server/migrations/versions/5f1707c525d2_add_filearchivemodel.py +39 -0
  34. dstack/_internal/server/migrations/versions/644b8a114187_add_secretmodel.py +49 -0
  35. dstack/_internal/server/models.py +33 -0
  36. dstack/_internal/server/routers/files.py +67 -0
  37. dstack/_internal/server/routers/gateways.py +6 -3
  38. dstack/_internal/server/routers/projects.py +63 -0
  39. dstack/_internal/server/routers/prometheus.py +5 -5
  40. dstack/_internal/server/routers/secrets.py +57 -15
  41. dstack/_internal/server/schemas/files.py +5 -0
  42. dstack/_internal/server/schemas/logs.py +10 -1
  43. dstack/_internal/server/schemas/projects.py +12 -0
  44. dstack/_internal/server/schemas/runner.py +2 -0
  45. dstack/_internal/server/schemas/secrets.py +7 -11
  46. dstack/_internal/server/security/permissions.py +75 -2
  47. dstack/_internal/server/services/backends/__init__.py +1 -1
  48. dstack/_internal/server/services/files.py +91 -0
  49. dstack/_internal/server/services/fleets.py +1 -1
  50. dstack/_internal/server/services/gateways/__init__.py +1 -1
  51. dstack/_internal/server/services/jobs/__init__.py +19 -8
  52. dstack/_internal/server/services/jobs/configurators/base.py +27 -3
  53. dstack/_internal/server/services/jobs/configurators/dev.py +3 -3
  54. dstack/_internal/server/services/logs/aws.py +38 -38
  55. dstack/_internal/server/services/logs/filelog.py +48 -14
  56. dstack/_internal/server/services/logs/gcp.py +17 -16
  57. dstack/_internal/server/services/projects.py +164 -5
  58. dstack/_internal/server/services/prometheus/__init__.py +0 -0
  59. dstack/_internal/server/services/prometheus/client_metrics.py +52 -0
  60. dstack/_internal/server/services/proxy/repo.py +3 -0
  61. dstack/_internal/server/services/runner/client.py +8 -0
  62. dstack/_internal/server/services/runs.py +55 -10
  63. dstack/_internal/server/services/secrets.py +204 -0
  64. dstack/_internal/server/services/services/__init__.py +2 -1
  65. dstack/_internal/server/services/storage/base.py +21 -0
  66. dstack/_internal/server/services/storage/gcs.py +28 -6
  67. dstack/_internal/server/services/storage/s3.py +27 -9
  68. dstack/_internal/server/services/users.py +1 -3
  69. dstack/_internal/server/services/volumes.py +1 -1
  70. dstack/_internal/server/settings.py +2 -2
  71. dstack/_internal/server/statics/index.html +1 -1
  72. dstack/_internal/server/statics/{main-0ac1e1583684417ae4d1.js → main-d151637af20f70b2e796.js} +104 -48
  73. dstack/_internal/server/statics/{main-0ac1e1583684417ae4d1.js.map → main-d151637af20f70b2e796.js.map} +1 -1
  74. dstack/_internal/server/statics/{main-f39c418b05fe14772dd8.css → main-d48635d8fe670d53961c.css} +1 -1
  75. dstack/_internal/server/statics/static/media/google.b194b06fafd0a52aeb566922160ea514.svg +1 -0
  76. dstack/_internal/server/testing/common.py +43 -5
  77. dstack/_internal/settings.py +5 -0
  78. dstack/_internal/utils/files.py +69 -0
  79. dstack/_internal/utils/nested_list.py +47 -0
  80. dstack/_internal/utils/path.py +12 -4
  81. dstack/api/_public/runs.py +73 -12
  82. dstack/api/server/__init__.py +6 -0
  83. dstack/api/server/_files.py +18 -0
  84. dstack/api/server/_logs.py +5 -1
  85. dstack/api/server/_projects.py +24 -0
  86. dstack/api/server/_secrets.py +15 -15
  87. dstack/version.py +1 -1
  88. {dstack-0.19.15rc1.dist-info → dstack-0.19.17.dist-info}/METADATA +3 -4
  89. {dstack-0.19.15rc1.dist-info → dstack-0.19.17.dist-info}/RECORD +93 -71
  90. /dstack/_internal/server/services/{prometheus.py → prometheus/custom_metrics.py} +0 -0
  91. {dstack-0.19.15rc1.dist-info → dstack-0.19.17.dist-info}/WHEEL +0 -0
  92. {dstack-0.19.15rc1.dist-info → dstack-0.19.17.dist-info}/entry_points.txt +0 -0
  93. {dstack-0.19.15rc1.dist-info → dstack-0.19.17.dist-info}/licenses/LICENSE.md +0 -0
@@ -0,0 +1 @@
1
+ <svg xmlns="http://www.w3.org/2000/svg" height="14" viewBox="0 0 24 24" width="14"><path d="M22.56 12.25c0-.78-.07-1.53-.2-2.25H12v4.26h5.92c-.26 1.37-1.04 2.53-2.21 3.31v2.77h3.57c2.08-1.92 3.28-4.74 3.28-8.09z" fill="#4285F4"/><path d="M12 23c2.97 0 5.46-.98 7.28-2.66l-3.57-2.77c-.98.66-2.23 1.06-3.71 1.06-2.86 0-5.29-1.93-6.16-4.53H2.18v2.84C3.99 20.53 7.7 23 12 23z" fill="#34A853"/><path d="M5.84 14.09c-.22-.66-.35-1.36-.35-2.09s.13-1.43.35-2.09V7.07H2.18C1.43 8.55 1 10.22 1 12s.43 3.45 1.18 4.93l2.85-2.22.81-.62z" fill="#FBBC05"/><path d="M12 5.38c1.62 0 3.06.56 4.21 1.64l3.15-3.15C17.45 2.09 14.97 1 12 1 7.7 1 3.99 3.47 2.18 7.07l3.66 2.84c.87-2.6 3.3-4.53 6.16-4.53z" fill="#EA4335"/><path d="M1 1h22v22H1z" fill="none"/></svg>
@@ -1,5 +1,6 @@
1
1
  import json
2
2
  import uuid
3
+ from collections.abc import Callable
3
4
  from contextlib import contextmanager
4
5
  from datetime import datetime, timezone
5
6
  from typing import Dict, List, Literal, Optional, Union
@@ -77,6 +78,7 @@ from dstack._internal.core.models.volumes import (
77
78
  from dstack._internal.server.models import (
78
79
  BackendModel,
79
80
  DecryptedString,
81
+ FileArchiveModel,
80
82
  FleetModel,
81
83
  GatewayComputeModel,
82
84
  GatewayModel,
@@ -89,6 +91,7 @@ from dstack._internal.server.models import (
89
91
  RepoCredsModel,
90
92
  RepoModel,
91
93
  RunModel,
94
+ SecretModel,
92
95
  UserModel,
93
96
  VolumeAttachmentModel,
94
97
  VolumeModel,
@@ -232,21 +235,38 @@ async def create_repo_creds(
232
235
  return repo_creds
233
236
 
234
237
 
238
+ async def create_file_archive(
239
+ session: AsyncSession,
240
+ user_id: UUID,
241
+ blob_hash: str = "blob_hash",
242
+ blob: bytes = b"blob_content",
243
+ ) -> FileArchiveModel:
244
+ archive = FileArchiveModel(
245
+ user_id=user_id,
246
+ blob_hash=blob_hash,
247
+ blob=blob,
248
+ )
249
+ session.add(archive)
250
+ await session.commit()
251
+ return archive
252
+
253
+
235
254
  def get_run_spec(
236
255
  run_name: str,
237
256
  repo_id: str,
238
- profile: Optional[Profile] = None,
257
+ configuration_path: str = "dstack.yaml",
258
+ profile: Union[Profile, Callable[[], Profile], None] = lambda: Profile(name="default"),
239
259
  configuration: Optional[AnyRunConfiguration] = None,
240
260
  ) -> RunSpec:
241
- if profile is None:
242
- profile = Profile(name="default")
261
+ if callable(profile):
262
+ profile = profile()
243
263
  return RunSpec(
244
264
  run_name=run_name,
245
265
  repo_id=repo_id,
246
266
  repo_data=LocalRunRepoData(repo_dir="/"),
247
267
  repo_code_hash=None,
248
268
  working_dir=".",
249
- configuration_path="dstack.yaml",
269
+ configuration_path=configuration_path,
250
270
  configuration=configuration or DevEnvironmentConfiguration(ide="vscode"),
251
271
  profile=profile,
252
272
  ssh_key_pub="user_ssh_key",
@@ -315,7 +335,9 @@ async def create_job(
315
335
  if deployment_num is None:
316
336
  deployment_num = run.deployment_num
317
337
  run_spec = RunSpec.parse_raw(run.run_spec)
318
- job_spec = (await get_job_specs_from_run_spec(run_spec, replica_num=replica_num))[0]
338
+ job_spec = (
339
+ await get_job_specs_from_run_spec(run_spec=run_spec, secrets={}, replica_num=replica_num)
340
+ )[0]
319
341
  job_spec.job_num = job_num
320
342
  job = JobModel(
321
343
  project_id=run.project_id,
@@ -917,6 +939,22 @@ async def create_job_prometheus_metrics(
917
939
  return metrics
918
940
 
919
941
 
942
+ async def create_secret(
943
+ session: AsyncSession,
944
+ project: ProjectModel,
945
+ name: str,
946
+ value: str,
947
+ ):
948
+ secret_model = SecretModel(
949
+ project=project,
950
+ name=name,
951
+ value=DecryptedString(plaintext=value),
952
+ )
953
+ session.add(secret_model)
954
+ await session.commit()
955
+ return secret_model
956
+
957
+
920
958
  def get_private_key_string() -> str:
921
959
  return """
922
960
  -----BEGIN RSA PRIVATE KEY-----
@@ -17,6 +17,11 @@ DSTACK_BASE_IMAGE_VERSION = os.getenv("DSTACK_BASE_IMAGE_VERSION", version.base_
17
17
  DSTACK_BASE_IMAGE_UBUNTU_VERSION = os.getenv(
18
18
  "DSTACK_BASE_IMAGE_UBUNTU_VERSION", version.base_image_ubuntu_version
19
19
  )
20
+ DSTACK_DIND_IMAGE = os.getenv("DSTACK_DIND_IMAGE", "dstackai/dind")
21
+
22
+ # Development settings
23
+
24
+ LOCAL_BACKEND_ENABLED = os.getenv("DSTACK_LOCAL_BACKEND_ENABLED") is not None
20
25
 
21
26
 
22
27
  class FeatureFlags:
@@ -0,0 +1,69 @@
1
+ import tarfile
2
+ from pathlib import Path
3
+ from typing import BinaryIO
4
+
5
+ import ignore
6
+ import ignore.overrides
7
+
8
+ from dstack._internal.utils.hash import get_sha256
9
+ from dstack._internal.utils.path import PathLike, normalize_path
10
+
11
+
12
+ def create_file_archive(root: PathLike, fp: BinaryIO) -> str:
13
+ """
14
+ Packs the directory or file to a tar archive and writes it to the file-like object.
15
+
16
+ Archives can be used to transfer file(s) (e.g., over the network) preserving
17
+ file properties such as permissions, timestamps, etc.
18
+
19
+ NOTE: `.gitignore` and `.dstackignore` are respected.
20
+
21
+ Args:
22
+ root: The absolute path to the directory or file.
23
+ fp: The binary file-like object.
24
+
25
+ Returns:
26
+ The SHA-256 hash of the archive as a hex string.
27
+
28
+ Raises:
29
+ ValueError: If the path is not absolute.
30
+ OSError: Underlying errors from the tarfile module
31
+ """
32
+ root = Path(root)
33
+ if not root.is_absolute():
34
+ raise ValueError(f"path must be absolute: {root}")
35
+ walk = (
36
+ ignore.WalkBuilder(root)
37
+ .overrides(ignore.overrides.OverrideBuilder(root).add("!/.git/").build())
38
+ .hidden(False) # do not ignore files that start with a dot
39
+ .require_git(False) # respect git ignore rules even if not a git repo
40
+ .add_custom_ignore_filename(".dstackignore")
41
+ .build()
42
+ )
43
+ # sort paths to ensure archive reproducibility
44
+ paths = sorted(entry.path() for entry in walk)
45
+ with tarfile.TarFile(mode="w", fileobj=fp) as t:
46
+ for path in paths:
47
+ arcname = str(path.relative_to(root.parent))
48
+ info = t.gettarinfo(path, arcname)
49
+ if info.issym():
50
+ # Symlinks are handled as follows: each symlink in the chain is checked, and
51
+ # * if the target is inside the root: keep relative links as is, replace absolute
52
+ # links with relative ones;
53
+ # * if the target is outside the root: replace the link with the actual file.
54
+ target = Path(info.linkname)
55
+ if not target.is_absolute():
56
+ target = path.parent / target
57
+ target = normalize_path(target)
58
+ try:
59
+ target.relative_to(root)
60
+ except ValueError:
61
+ # Adding as a file
62
+ t.add(path.resolve(), arcname, recursive=False)
63
+ else:
64
+ # Adding as a relative symlink
65
+ info.linkname = str(target.relative_to(path.parent, walk_up=True))
66
+ t.addfile(info)
67
+ else:
68
+ t.add(path, arcname, recursive=False)
69
+ return get_sha256(fp)
@@ -0,0 +1,47 @@
1
+ from dataclasses import dataclass, field
2
+ from typing import Optional
3
+
4
+
5
+ @dataclass
6
+ class NestedListItem:
7
+ label: str
8
+ children: list["NestedListItem"] = field(default_factory=list)
9
+
10
+ def render(self, indent: int = 0, visited: Optional[set[int]] = None) -> str:
11
+ if visited is None:
12
+ visited = set()
13
+
14
+ item_id = id(self)
15
+ if item_id in visited:
16
+ raise ValueError(f"Cycle detected at item: {self.label}")
17
+
18
+ visited.add(item_id)
19
+ prefix = " " * indent + "- "
20
+ output = f"{prefix}{self.label}\n"
21
+ for child in self.children:
22
+ # `visited.copy()` so that we only detect cycles within each path,
23
+ # rather than duplicate items in unrelated paths
24
+ output += child.render(indent + 1, visited.copy())
25
+ return output
26
+
27
+
28
+ @dataclass
29
+ class NestedList:
30
+ """
31
+ A nested list that can be rendered in Markdown-like format:
32
+
33
+ - Item 1
34
+ - Item 2
35
+ - Item 2.1
36
+ - Item 2.2
37
+ - Item 2.2.1
38
+ - Item 3
39
+ """
40
+
41
+ children: list[NestedListItem] = field(default_factory=list)
42
+
43
+ def render(self) -> str:
44
+ output = ""
45
+ for child in self.children:
46
+ output += child.render()
47
+ return output
@@ -27,16 +27,24 @@ def path_in_dir(path: PathLike, directory: PathLike) -> bool:
27
27
  return False
28
28
 
29
29
 
30
- def resolve_relative_path(path: str) -> PurePath:
30
+ def normalize_path(path: PathLike) -> PurePath:
31
31
  path = PurePath(path)
32
- if path.is_absolute():
33
- raise ValueError("Path should be relative")
34
32
  stack = []
35
33
  for part in path.parts:
36
34
  if part == "..":
37
35
  if not stack:
38
- raise ValueError("Path is outside of the repo")
36
+ raise ValueError("Path is outside of the top directory")
39
37
  stack.pop()
40
38
  else:
41
39
  stack.append(part)
42
40
  return PurePath(*stack)
41
+
42
+
43
+ def resolve_relative_path(path: PathLike) -> PurePath:
44
+ path = PurePath(path)
45
+ if path.is_absolute():
46
+ raise ValueError("Path should be relative")
47
+ try:
48
+ return normalize_path(path)
49
+ except ValueError:
50
+ raise ValueError("Path is outside of the repo")
@@ -4,10 +4,12 @@ import tempfile
4
4
  import threading
5
5
  import time
6
6
  from abc import ABC
7
+ from collections.abc import Iterator
8
+ from contextlib import contextmanager
7
9
  from copy import copy
8
10
  from datetime import datetime
9
11
  from pathlib import Path
10
- from typing import Dict, Iterable, List, Optional, Union
12
+ from typing import BinaryIO, Dict, Iterable, List, Optional, Union
11
13
  from urllib.parse import urlparse
12
14
 
13
15
  from websocket import WebSocketApp
@@ -17,6 +19,7 @@ from dstack._internal.core.consts import DSTACK_RUNNER_HTTP_PORT, DSTACK_RUNNER_
17
19
  from dstack._internal.core.errors import ClientError, ConfigurationError, ResourceNotExistsError
18
20
  from dstack._internal.core.models.backends.base import BackendType
19
21
  from dstack._internal.core.models.configurations import AnyRunConfiguration, PortMapping
22
+ from dstack._internal.core.models.files import FileArchiveMapping, FilePathMapping
20
23
  from dstack._internal.core.models.profiles import (
21
24
  CreationPolicy,
22
25
  Profile,
@@ -42,6 +45,7 @@ from dstack._internal.core.services.ssh.attach import SSHAttach
42
45
  from dstack._internal.core.services.ssh.ports import PortsLock
43
46
  from dstack._internal.server.schemas.logs import PollLogsRequest
44
47
  from dstack._internal.utils.common import get_or_error, make_proxy_url
48
+ from dstack._internal.utils.files import create_file_archive
45
49
  from dstack._internal.utils.logging import get_logger
46
50
  from dstack._internal.utils.path import PathLike, path_in_dir
47
51
  from dstack.api.server import APIClient
@@ -204,25 +208,26 @@ class Run(ABC):
204
208
  job = self._find_job(replica_num=replica_num, job_num=job_num)
205
209
  if job is None:
206
210
  return []
207
- next_start_time = start_time
211
+ next_token = None
208
212
  while True:
209
213
  resp = self._api_client.logs.poll(
210
214
  project_name=self._project,
211
215
  body=PollLogsRequest(
212
216
  run_name=self.name,
213
217
  job_submission_id=job.job_submissions[-1].id,
214
- start_time=next_start_time,
218
+ start_time=start_time,
215
219
  end_time=None,
216
220
  descending=False,
217
221
  limit=1000,
218
222
  diagnose=diagnose,
223
+ next_token=next_token,
219
224
  ),
220
225
  )
221
- if len(resp.logs) == 0:
222
- return []
223
226
  for log in resp.logs:
224
227
  yield base64.b64decode(log.message)
225
- next_start_time = resp.logs[-1].timestamp
228
+ next_token = resp.next_token
229
+ if next_token is None:
230
+ break
226
231
 
227
232
  def refresh(self):
228
233
  """
@@ -435,12 +440,16 @@ class RunCollection:
435
440
  """
436
441
  if repo is None:
437
442
  repo = VirtualRepo()
443
+ repo_code_hash = None
444
+ else:
445
+ with _prepare_code_file(repo) as (_, repo_code_hash):
446
+ pass
438
447
 
439
448
  run_spec = RunSpec(
440
449
  run_name=configuration.name,
441
450
  repo_id=repo.repo_id,
442
451
  repo_data=repo.run_repo_data,
443
- repo_code_hash=None, # `apply_plan` will fill it
452
+ repo_code_hash=repo_code_hash,
444
453
  working_dir=configuration.working_dir,
445
454
  configuration_path=configuration_path,
446
455
  configuration=configuration,
@@ -475,20 +484,36 @@ class RunCollection:
475
484
  # TODO handle multiple jobs
476
485
  ports_lock = _reserve_ports(run_plan.job_plans[0].job_spec)
477
486
 
487
+ run_spec = run_plan.run_spec
488
+ configuration = run_spec.configuration
489
+
490
+ self._validate_configuration_files(configuration, run_spec.configuration_path)
491
+ for file_mapping in configuration.files:
492
+ assert isinstance(file_mapping, FilePathMapping)
493
+ with tempfile.TemporaryFile("w+b") as fp:
494
+ try:
495
+ archive_hash = create_file_archive(file_mapping.local_path, fp)
496
+ except OSError as e:
497
+ raise ClientError(f"failed to archive '{file_mapping.local_path}': {e}") from e
498
+ fp.seek(0)
499
+ archive = self._api_client.files.upload_archive(hash=archive_hash, fp=fp)
500
+ run_spec.file_archives.append(
501
+ FileArchiveMapping(id=archive.id, path=file_mapping.path)
502
+ )
503
+
478
504
  if repo is None:
479
505
  repo = VirtualRepo()
480
506
  else:
481
507
  # Do not upload the diff without a repo (a default virtual repo)
482
508
  # since upload_code() requires a repo to be initialized.
483
- with tempfile.TemporaryFile("w+b") as fp:
484
- run_plan.run_spec.repo_code_hash = repo.write_code_file(fp)
485
- fp.seek(0)
509
+ with _prepare_code_file(repo) as (fp, repo_code_hash):
486
510
  self._api_client.repos.upload_code(
487
511
  project_name=self._project,
488
512
  repo_id=repo.repo_id,
489
- code_hash=run_plan.run_spec.repo_code_hash,
513
+ code_hash=repo_code_hash,
490
514
  fp=fp,
491
515
  )
516
+
492
517
  run = self._api_client.runs.apply_plan(self._project, run_plan)
493
518
  return self._model_to_submitted_run(run, ports_lock)
494
519
 
@@ -626,6 +651,10 @@ class RunCollection:
626
651
  logger.warning("The get_plan() method is deprecated in favor of get_run_plan().")
627
652
  if repo is None:
628
653
  repo = VirtualRepo()
654
+ repo_code_hash = None
655
+ else:
656
+ with _prepare_code_file(repo) as (_, repo_code_hash):
657
+ pass
629
658
 
630
659
  if working_dir is None:
631
660
  working_dir = "."
@@ -662,7 +691,7 @@ class RunCollection:
662
691
  run_name=run_name,
663
692
  repo_id=repo.repo_id,
664
693
  repo_data=repo.run_repo_data,
665
- repo_code_hash=None, # `exec_plan` will fill it
694
+ repo_code_hash=repo_code_hash,
666
695
  working_dir=working_dir,
667
696
  configuration_path=configuration_path,
668
697
  configuration=configuration,
@@ -761,6 +790,30 @@ class RunCollection:
761
790
  ports_lock,
762
791
  )
763
792
 
793
+ def _validate_configuration_files(
794
+ self, configuration: AnyRunConfiguration, configuration_path: Optional[PathLike]
795
+ ) -> None:
796
+ """
797
+ Expands, normalizes and validates local paths specified in
798
+ the `files` configuration property.
799
+ """
800
+ base_dir: Optional[Path] = None
801
+ if configuration_path is not None:
802
+ base_dir = Path(configuration_path).expanduser().resolve().parent
803
+ for file_mapping in configuration.files:
804
+ assert isinstance(file_mapping, FilePathMapping)
805
+ path = Path(file_mapping.local_path).expanduser()
806
+ if not path.is_absolute():
807
+ if base_dir is None:
808
+ raise ConfigurationError(
809
+ f"Path '{path}' is relative but `configuration_path` is not provided"
810
+ )
811
+ else:
812
+ path = base_dir / path
813
+ if not path.exists():
814
+ raise ConfigurationError(f"Path '{path}' specified in `files` does not exist")
815
+ file_mapping.local_path = str(path)
816
+
764
817
 
765
818
  def _reserve_ports(
766
819
  job_spec: JobSpec,
@@ -780,3 +833,11 @@ def _reserve_ports(
780
833
  ports[port_override.container_port] = port_override.local_port or 0
781
834
  logger.debug("Reserving ports: %s", ports)
782
835
  return PortsLock(ports).acquire()
836
+
837
+
838
+ @contextmanager
839
+ def _prepare_code_file(repo: Repo) -> Iterator[tuple[BinaryIO, str]]:
840
+ with tempfile.TemporaryFile("w+b") as fp:
841
+ repo_code_hash = repo.write_code_file(fp)
842
+ fp.seek(0)
843
+ yield fp, repo_code_hash
@@ -14,6 +14,7 @@ from dstack._internal.core.errors import (
14
14
  )
15
15
  from dstack._internal.utils.logging import get_logger
16
16
  from dstack.api.server._backends import BackendsAPIClient
17
+ from dstack.api.server._files import FilesAPIClient
17
18
  from dstack.api.server._fleets import FleetsAPIClient
18
19
  from dstack.api.server._gateways import GatewaysAPIClient
19
20
  from dstack.api.server._logs import LogsAPIClient
@@ -47,6 +48,7 @@ class APIClient:
47
48
  logs: operations with logs
48
49
  gateways: operations with gateways
49
50
  volumes: operations with volumes
51
+ files: operations with files
50
52
  """
51
53
 
52
54
  def __init__(self, base_url: str, token: str):
@@ -111,6 +113,10 @@ class APIClient:
111
113
  def volumes(self) -> VolumesAPIClient:
112
114
  return VolumesAPIClient(self._request)
113
115
 
116
+ @property
117
+ def files(self) -> FilesAPIClient:
118
+ return FilesAPIClient(self._request)
119
+
114
120
  def _request(
115
121
  self,
116
122
  path: str,
@@ -0,0 +1,18 @@
1
+ from typing import BinaryIO
2
+
3
+ from pydantic import parse_obj_as
4
+
5
+ from dstack._internal.core.models.files import FileArchive
6
+ from dstack._internal.server.schemas.files import GetFileArchiveByHashRequest
7
+ from dstack.api.server._group import APIClientGroup
8
+
9
+
10
+ class FilesAPIClient(APIClientGroup):
11
+ def get_archive_by_hash(self, hash: str) -> FileArchive:
12
+ body = GetFileArchiveByHashRequest(hash=hash)
13
+ resp = self._request("/api/files/get_archive_by_hash", body=body.json())
14
+ return parse_obj_as(FileArchive.__response__, resp.json())
15
+
16
+ def upload_archive(self, hash: str, fp: BinaryIO) -> FileArchive:
17
+ resp = self._request("/api/files/upload_archive", files={"file": (hash, fp)})
18
+ return parse_obj_as(FileArchive.__response__, resp.json())
@@ -1,5 +1,6 @@
1
1
  from pydantic import parse_obj_as
2
2
 
3
+ from dstack._internal.core.compatibility.logs import get_poll_logs_excludes
3
4
  from dstack._internal.core.models.logs import JobSubmissionLogs
4
5
  from dstack._internal.server.schemas.logs import PollLogsRequest
5
6
  from dstack.api.server._group import APIClientGroup
@@ -7,5 +8,8 @@ from dstack.api.server._group import APIClientGroup
7
8
 
8
9
  class LogsAPIClient(APIClientGroup):
9
10
  def poll(self, project_name: str, body: PollLogsRequest) -> JobSubmissionLogs:
10
- resp = self._request(f"/api/project/{project_name}/logs/poll", body=body.json())
11
+ resp = self._request(
12
+ f"/api/project/{project_name}/logs/poll",
13
+ body=body.json(exclude=get_poll_logs_excludes(body)),
14
+ )
11
15
  return parse_obj_as(JobSubmissionLogs.__response__, resp.json())
@@ -3,10 +3,13 @@ from typing import List
3
3
  from pydantic import parse_obj_as
4
4
 
5
5
  from dstack._internal.core.models.projects import Project
6
+ from dstack._internal.core.models.users import ProjectRole
6
7
  from dstack._internal.server.schemas.projects import (
8
+ AddProjectMemberRequest,
7
9
  CreateProjectRequest,
8
10
  DeleteProjectsRequest,
9
11
  MemberSetting,
12
+ RemoveProjectMemberRequest,
10
13
  SetProjectMembersRequest,
11
14
  )
12
15
  from dstack.api.server._group import APIClientGroup
@@ -34,3 +37,24 @@ class ProjectsAPIClient(APIClientGroup):
34
37
  body = SetProjectMembersRequest(members=members)
35
38
  resp = self._request(f"/api/projects/{project_name}/set_members", body=body.json())
36
39
  return parse_obj_as(Project.__response__, resp.json())
40
+
41
+ def add_member(self, project_name: str, username: str, project_role: ProjectRole) -> Project:
42
+ member_setting = MemberSetting(username=username, project_role=project_role)
43
+ body = AddProjectMemberRequest(members=[member_setting])
44
+ resp = self._request(f"/api/projects/{project_name}/add_members", body=body.json())
45
+ return parse_obj_as(Project.__response__, resp.json())
46
+
47
+ def add_members(self, project_name: str, members: List[MemberSetting]) -> Project:
48
+ body = AddProjectMemberRequest(members=members)
49
+ resp = self._request(f"/api/projects/{project_name}/add_members", body=body.json())
50
+ return parse_obj_as(Project.__response__, resp.json())
51
+
52
+ def remove_member(self, project_name: str, username: str) -> Project:
53
+ body = RemoveProjectMemberRequest(usernames=[username])
54
+ resp = self._request(f"/api/projects/{project_name}/remove_members", body=body.json())
55
+ return parse_obj_as(Project.__response__, resp.json())
56
+
57
+ def remove_members(self, project_name: str, usernames: List[str]) -> Project:
58
+ body = RemoveProjectMemberRequest(usernames=usernames)
59
+ resp = self._request(f"/api/projects/{project_name}/remove_members", body=body.json())
60
+ return parse_obj_as(Project.__response__, resp.json())
@@ -4,33 +4,33 @@ from pydantic import parse_obj_as
4
4
 
5
5
  from dstack._internal.core.models.secrets import Secret
6
6
  from dstack._internal.server.schemas.secrets import (
7
- AddSecretRequest,
7
+ CreateOrUpdateSecretRequest,
8
8
  DeleteSecretsRequest,
9
- GetSecretsRequest,
10
- ListSecretsRequest,
9
+ GetSecretRequest,
11
10
  )
12
11
  from dstack.api.server._group import APIClientGroup
13
12
 
14
13
 
15
14
  class SecretsAPIClient(APIClientGroup):
16
- def list(self, project_name: str, repo_id: str) -> List[Secret]:
17
- body = ListSecretsRequest(repo_id=repo_id)
18
- resp = self._request(f"/api/project/{project_name}/secrets/list", body=body.json())
15
+ def list(self, project_name: str) -> List[Secret]:
16
+ resp = self._request(f"/api/project/{project_name}/secrets/list")
19
17
  return parse_obj_as(List[Secret.__response__], resp.json())
20
18
 
21
- def get(self, project_name: str, repo_id: str, secret_name: str) -> Secret:
22
- raise NotImplementedError()
23
- body = GetSecretsRequest(repo_id=repo_id)
19
+ def get(self, project_name: str, name: str) -> Secret:
20
+ body = GetSecretRequest(name=name)
24
21
  resp = self._request(f"/api/project/{project_name}/secrets/get", body=body.json())
25
22
  return parse_obj_as(Secret, resp.json())
26
23
 
27
- def add(self, project_name: str, repo_id: str, secret_name: str, secret_value: str) -> Secret:
28
- body = AddSecretRequest(
29
- repo_id=repo_id, secret=Secret(name=secret_name, value=secret_value)
24
+ def create_or_update(self, project_name: str, name: str, value: str) -> Secret:
25
+ body = CreateOrUpdateSecretRequest(
26
+ name=name,
27
+ value=value,
28
+ )
29
+ resp = self._request(
30
+ f"/api/project/{project_name}/secrets/create_or_update", body=body.json()
30
31
  )
31
- resp = self._request(f"/api/project/{project_name}/secrets/add", body=body.json())
32
32
  return parse_obj_as(Secret.__response__, resp.json())
33
33
 
34
- def delete(self, project_name: str, repo_id: str, secrets_names: List[str]):
35
- body = DeleteSecretsRequest(repo_id=repo_id, secrets_names=secrets_names)
34
+ def delete(self, project_name: str, names: List[str]):
35
+ body = DeleteSecretsRequest(secrets_names=names)
36
36
  self._request(f"/api/project/{project_name}/secrets/delete", body=body.json())
dstack/version.py CHANGED
@@ -1,4 +1,4 @@
1
- __version__ = "0.19.15rc1"
1
+ __version__ = "0.19.17"
2
2
  __is_release__ = True
3
3
  base_image = "0.10"
4
4
  base_image_ubuntu_version = "22.04"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dstack
3
- Version: 0.19.15rc1
3
+ Version: 0.19.17
4
4
  Summary: dstack is an open-source orchestration engine for running AI workloads on any cloud or on-premises.
5
5
  Project-URL: Homepage, https://dstack.ai
6
6
  Project-URL: Source, https://github.com/dstackai/dstack
@@ -338,8 +338,7 @@ Description-Content-Type: text/markdown
338
338
 
339
339
  </div>
340
340
 
341
- `dstack` is an open-source alternative to Kubernetes and Slurm, designed to simplify GPU allocation and AI workload
342
- orchestration for ML teams across top clouds and on-prem clusters.
341
+ `dstack` is an open-source container orchestrator that simplifies workload orchestration and drives GPU utilization for ML teams. It works with any GPU cloud, on-prem cluster, or accelerated hardware.
343
342
 
344
343
  #### Accelerators
345
344
 
@@ -355,7 +354,7 @@ orchestration for ML teams across top clouds and on-prem clusters.
355
354
 
356
355
  ## How does it work?
357
356
 
358
- <img src="https://dstack.ai/static-assets/static-assets/images/dstack-architecture-diagram-v8.svg" width="750" />
357
+ <img src="https://dstack.ai/static-assets/static-assets/images/dstack-architecture-diagram-v10.svg" width="750" />
359
358
 
360
359
  ### Installation
361
360