skypilot-nightly 1.0.0.dev20241104__py3-none-any.whl → 1.0.0.dev20241106__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sky/__init__.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
 import urllib.request
 
 # Replaced with the current commit when building the wheels.
-_SKYPILOT_COMMIT_SHA = '0f0008d41ecf4919b169541fb86a4ec60d1485ab'
+_SKYPILOT_COMMIT_SHA = '1dcd2f009c1989cbc130e5b8490170a5a96c3e23'
 
 
 def _get_git_commit():
@@ -35,7 +35,7 @@ def _get_git_commit():
 
 
 __commit__ = _get_git_commit()
-__version__ = '1.0.0.dev20241104'
+__version__ = '1.0.0.dev20241106'
 __root_dir__ = os.path.dirname(os.path.abspath(__file__))
 
 
sky/backends/cloud_vm_ray_backend.py CHANGED
@@ -3278,9 +3278,10 @@ class CloudVmRayBackend(backends.Backend['CloudVmRayResourceHandle']):
             f'{cd} && {constants.SKY_RAY_CMD} job submit '
             '--address=http://127.0.0.1:$RAY_DASHBOARD_PORT '
             f'--submission-id {job_id}-$(whoami) --no-wait '
-            # Redirect stderr to /dev/null to avoid distracting error from ray.
-            f'"{constants.SKY_PYTHON_CMD} -u {script_path} > {remote_log_path} '
-            '2> /dev/null"')
+            f'"{constants.SKY_PYTHON_CMD} -u {script_path} '
+            # Do not use &>, which is not POSIX and may not work.
+            # Note that the order of ">filename 2>&1" matters.
+            f'> {remote_log_path} 2>&1"')
 
         code = job_lib.JobLibCodeGen.queue_job(job_id, job_submit_cmd)
         job_submit_cmd = ' && '.join([mkdir_code, create_script_code, code])
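
For context on the redirection change: POSIX sh processes redirections left to right, so "> {log} 2>&1" sends both streams to the log, while the reversed order would not. A minimal sketch (illustrative only, not part of the package):

    import subprocess

    # '> log 2>&1': stdout is redirected to the log first, then stderr is
    # duplicated onto the already-redirected stdout, so both streams are logged.
    subprocess.run("{ echo out; echo err >&2; } > /tmp/demo.log 2>&1", shell=True)

    # '2>&1 > log': stderr is duplicated onto the original stdout (the terminal)
    # before stdout is redirected, so 'err' never reaches the log.
    subprocess.run("{ echo out; echo err >&2; } 2>&1 > /tmp/demo.log", shell=True)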
sky/clouds/aws.py CHANGED
@@ -280,12 +280,12 @@ class AWS(clouds.Cloud):
         if image_id.startswith('skypilot:'):
             return DEFAULT_AMI_GB
         assert region is not None, (image_id, region)
-        client = aws.client('ec2', region_name=region)
         image_not_found_message = (
             f'Image {image_id!r} not found in AWS region {region}.\n'
             f'\nTo find AWS AMI IDs: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html#examples\n'  # pylint: disable=line-too-long
             'Example: ami-0729d913a335efca7')
         try:
+            client = aws.client('ec2', region_name=region)
             image_info = client.describe_images(ImageIds=[image_id])
             image_info = image_info.get('Images', [])
             if not image_info:
@@ -294,7 +294,8 @@ class AWS(clouds.Cloud):
             image_info = image_info[0]
             image_size = image_info['BlockDeviceMappings'][0]['Ebs'][
                 'VolumeSize']
-        except aws.botocore_exceptions().NoCredentialsError:
+        except (aws.botocore_exceptions().NoCredentialsError,
+                aws.botocore_exceptions().ProfileNotFound):
             # Fallback to default image size if no credentials are available.
             # The credentials issue will be caught when actually provisioning
             # the instance and appropriate errors will be raised there.
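
This moves client creation inside the try block (creating a client can itself raise when a configured profile is missing) and treats ProfileNotFound like NoCredentialsError. A standalone sketch of the pattern using boto3/botocore directly (SkyPilot reaches these through its aws adaptor; the fallback value here is hypothetical):

    import boto3
    from botocore import exceptions as botocore_exceptions

    DEFAULT_AMI_GB = 45.0  # hypothetical fallback, for illustration

    def image_size_gb(image_id: str, region: str) -> float:
        try:
            client = boto3.client('ec2', region_name=region)
            images = client.describe_images(ImageIds=[image_id]).get('Images', [])
            if not images:
                raise ValueError(f'Image {image_id!r} not found in {region}.')
            return float(images[0]['BlockDeviceMappings'][0]['Ebs']['VolumeSize'])
        except (botocore_exceptions.NoCredentialsError,
                botocore_exceptions.ProfileNotFound):
            # No usable credentials: fall back now; provisioning will surface
            # the credential problem later with a proper error message.
            return DEFAULT_AMI_GB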
sky/clouds/azure.py CHANGED
@@ -175,7 +175,11 @@ class Azure(clouds.Cloud):
 
         # Process user-specified images.
         azure_utils.validate_image_id(image_id)
-        compute_client = azure.get_client('compute', cls.get_project_id())
+        try:
+            compute_client = azure.get_client('compute', cls.get_project_id())
+        except (azure.exceptions().AzureError, RuntimeError):
+            # Fallback to default image size if no credentials are available.
+            return 0.0
 
         # Community gallery image.
         if image_id.startswith(_COMMUNITY_IMAGE_PREFIX):
sky/jobs/dashboard/dashboard.py CHANGED
@@ -74,11 +74,14 @@ def home():
     # Remove filler rows ([''], ..., ['-']).
     rows = [row for row in rows if ''.join(map(str, row)) != '']
 
+    # Get all unique status values.
+    status_values = sorted(list(set(row[-5] for row in rows)))
     rendered_html = flask.render_template(
         'index.html',
         columns=columns,
         rows=rows,
         last_updated_timestamp=timestamp,
+        status_values=status_values,
     )
     return rendered_html
 
sky/jobs/dashboard/templates/index.html CHANGED
@@ -44,6 +44,22 @@
         .clickable {
             cursor: pointer; /* This makes the cursor a pointer when hovering over the element */
         }
+
+        .filter-controls {
+            display: flex;
+            gap: 10px;
+            align-items: center; /* This ensures vertical alignment */
+            margin-top: 1rem;
+            position: relative;
+            z-index: 2;
+        }
+
+        /* Customize the select focus/hover states */
+        .form-select:focus {
+            border-color: #dee2e6;
+            box-shadow: 0 0 0 0.1rem rgba(0,0,0,0.1);
+        }
+
     </style>
     <script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.29.1/moment.min.js"></script>
     <script
@@ -54,12 +70,21 @@
 <body>
     <div class="container">
         <header>
-            <h1>Managed jobs</h1>
+            <h1>SkyPilot managed jobs</h1>
             <p class="text-muted mt-4" id="last-updated"></p>
             <div class="form-check form-switch">
                 <input class="form-check-input" type="checkbox" id="refresh-toggle" checked>
                 <label class="form-check-label" for="refresh-toggle">Auto-refresh (every 30s)</label>
             </div>
+            <div class="filter-controls">
+                <span class="fw-medium fs-6">Filter by status:</span>
+                <select class="form-select" id="status-filter" style="width: auto;">
+                    <option value="">All statuses</option>
+                    {% for status in status_values %}
+                    <option value="{{ status }}">{{ status }}</option>
+                    {% endfor %}
+                </select>
+            </div>
         </header>
 
         <table class="table table-hover table-hover-selected fixed-header-table" id="jobs-table">
@@ -204,17 +229,25 @@
     </script>
     <script>
         function filterStatus(status) {
-            var rows = document.querySelectorAll("#spot-jobs-table tbody tr");
-            rows.forEach(function (row) {
-                var statusCell = row.querySelector("td:nth-child(9)");
+            var rows = document.querySelectorAll("#jobs-table tbody tr");
+            rows.forEach(function(row) {
+                var statusCell = row.querySelector("td:nth-child(10)"); // Status is in the 10th column
+                var statusText = statusCell.textContent.trim().split(' ')[0]; // Get first word of status
 
-                if (status === '' || statusCell.textContent === status) {
+                if (status === '' || statusText === status) {
                     row.style.display = "";
                 } else {
                     row.style.display = "none";
                 }
             });
         }
+
+        // Add event listener for the status filter
+        document.addEventListener("DOMContentLoaded", function() {
+            document.getElementById("status-filter").addEventListener("change", function() {
+                filterStatus(this.value);
+            });
+        });
     </script>
 
 </body>
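
The filter dropdown is populated from the table data itself: dashboard.py collects the distinct values of row[-5] (the status column, counted from the end of each row), and the template renders one option per value. A tiny sketch of the Python side (row contents are hypothetical):

    # Hypothetical rows mirroring the dashboard table's layout, with the
    # status value sitting five columns from the end of each row.
    rows = [
        ['1', 'job-a', 'RUNNING', 'x', 'y', 'z', 'w'],
        ['2', 'job-b', 'SUCCEEDED', 'x', 'y', 'z', 'w'],
        ['3', 'job-c', 'RUNNING', 'x', 'y', 'z', 'w'],
    ]
    status_values = sorted(set(row[-5] for row in rows))
    print(status_values)  # ['RUNNING', 'SUCCEEDED']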
sky/provision/instance_setup.py CHANGED
@@ -283,11 +283,27 @@ def start_ray_on_head_node(cluster_name: str, custom_resource: Optional[str],
     # the same credentials. Otherwise, `ray status` will fail to fetch the
     # available nodes.
     # Reference: https://github.com/skypilot-org/skypilot/issues/2441
-    cmd = (f'{constants.SKY_RAY_CMD} stop; '
-           'unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY; '
-           'RAY_SCHEDULER_EVENTS=0 RAY_DEDUP_LOGS=0 '
-           f'{constants.SKY_RAY_CMD} start --head {ray_options} || exit 1;' +
-           _RAY_PRLIMIT + _DUMP_RAY_PORTS + RAY_HEAD_WAIT_INITIALIZED_COMMAND)
+    cmd = (
+        f'{constants.SKY_RAY_CMD} stop; '
+        'unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY; '
+        'RAY_SCHEDULER_EVENTS=0 RAY_DEDUP_LOGS=0 '
+        # worker_maximum_startup_concurrency controls the maximum number of
+        # workers that can be started concurrently. However, it also controls
+        # this warning message:
+        # https://github.com/ray-project/ray/blob/d5d03e6e24ae3cfafb87637ade795fb1480636e6/src/ray/raylet/worker_pool.cc#L1535-L1545
+        # maximum_startup_concurrency defaults to the number of CPUs given by
+        # multiprocessing.cpu_count() or manually specified to ray. (See
+        # https://github.com/ray-project/ray/blob/fab26e1813779eb568acba01281c6dd963c13635/python/ray/_private/services.py#L1622-L1624.)
+        # The warning will show when the number of workers is >4x the
+        # maximum_startup_concurrency, so typically 4x CPU count. However, the
+        # job controller uses 0.25cpu reservations, and each job can use two
+        # workers (one for the submitted job and one for remote actors),
+        # resulting in a worker count of 8x CPUs or more. Increase the
+        # worker_maximum_startup_concurrency to 3x CPUs so that we will only see
+        # the warning when the worker count is >12x CPUs.
+        'RAY_worker_maximum_startup_concurrency=$(( 3 * $(nproc --all) )) '
+        f'{constants.SKY_RAY_CMD} start --head {ray_options} || exit 1;' +
+        _RAY_PRLIMIT + _DUMP_RAY_PORTS + RAY_HEAD_WAIT_INITIALIZED_COMMAND)
     logger.info(f'Running command on head node: {cmd}')
     # TODO(zhwu): add the output to log files.
     returncode, stdout, stderr = head_runner.run(
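
To make the arithmetic in the new comment concrete (the CPU count here is hypothetical):

    cpus = 4                         # e.g. a small jobs-controller VM

    # Ray warns once the worker count exceeds 4x maximum_startup_concurrency,
    # which defaults to the CPU count.
    default_warn_at = 4 * cpus       # 16 workers

    # With 0.25-CPU job reservations and up to two workers per job, the
    # controller can reach 8x CPUs or more.
    typical_workers = 8 * cpus       # 32 workers: warns under the default

    # Raising the concurrency to 3x CPUs moves the threshold to 12x CPUs.
    raised_warn_at = 4 * (3 * cpus)  # 48 workers: no warning at 32
    print(default_warn_at, typical_workers, raised_warn_at)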
sky/provision/kubernetes/instance.py CHANGED
@@ -2,7 +2,7 @@
 import copy
 import json
 import time
-from typing import Any, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional
 import uuid
 
 from sky import exceptions
@@ -24,6 +24,8 @@ from sky.utils import ux_utils
 
 POLL_INTERVAL = 2
 _TIMEOUT_FOR_POD_TERMINATION = 60  # 1 minutes
+_MAX_RETRIES = 3
+NUM_THREADS = subprocess_utils.get_parallel_threads() * 2
 
 logger = sky_logging.init_logger(__name__)
 TAG_RAY_CLUSTER_NAME = 'ray-cluster-name'
@@ -304,6 +306,33 @@ def _wait_for_pods_to_run(namespace, context, new_nodes):
         time.sleep(1)
 
 
+def _run_function_with_retries(func: Callable,
+                               operation_name: str,
+                               max_retries: int = _MAX_RETRIES,
+                               retry_delay: int = 5) -> Any:
+    """Runs a function with retries on Kubernetes errors.
+
+    Args:
+        func: Function to retry
+        operation_name: Name of the operation for logging
+        max_retries: Maximum number of retry attempts
+        retry_delay: Delay between retries in seconds
+
+    Raises:
+        The last exception encountered if all retries fail.
+    """
+    for attempt in range(max_retries + 1):
+        try:
+            return func()
+        except config_lib.KubernetesError:
+            if attempt < max_retries:
+                logger.warning(f'Failed to {operation_name} - '
+                               f'retrying in {retry_delay} seconds.')
+                time.sleep(retry_delay)
+            else:
+                raise
+
+
 def _set_env_vars_in_pods(namespace: str, context: Optional[str],
                           new_pods: List):
     """Setting environment variables in pods.
@@ -323,14 +352,27 @@ def _set_env_vars_in_pods(namespace: str, context: Optional[str],
     """
     set_k8s_env_var_cmd = docker_utils.SETUP_ENV_VARS_CMD
 
-    for new_pod in new_pods:
+    def _set_env_vars_thread(new_pod):
+        pod_name = new_pod.metadata.name
+        logger.info(f'{"-"*20}Start: Set up env vars in pod {pod_name!r} '
+                    f'{"-"*20}')
         runner = command_runner.KubernetesCommandRunner(
-            ((namespace, context), new_pod.metadata.name))
-        rc, stdout, _ = runner.run(set_k8s_env_var_cmd,
-                                   require_outputs=True,
-                                   stream_logs=False)
-        _raise_command_running_error('set env vars', set_k8s_env_var_cmd,
-                                     new_pod.metadata.name, rc, stdout)
+            ((namespace, context), pod_name))
+
+        def _run_env_vars_cmd():
+            rc, stdout, _ = runner.run(set_k8s_env_var_cmd,
+                                       require_outputs=True,
+                                       stream_logs=False)
+            _raise_command_running_error('set env vars', set_k8s_env_var_cmd,
+                                         pod_name, rc, stdout)
+
+        _run_function_with_retries(_run_env_vars_cmd,
+                                   f'set env vars in pod {pod_name}')
+        logger.info(f'{"-"*20}End: Set up env vars in pod {pod_name!r} '
+                    f'{"-"*20}')
+
+    subprocess_utils.run_in_parallel(_set_env_vars_thread, new_pods,
+                                     NUM_THREADS)
 
 
 def _check_user_privilege(namespace: str, context: Optional[str],
@@ -350,23 +392,37 @@ def _check_user_privilege(namespace: str, context: Optional[str],
         ' fi; '
         'fi')
 
-    for new_node in new_nodes:
-        runner = command_runner.KubernetesCommandRunner(
-            ((namespace, context), new_node.metadata.name))
+    # This check needs to run on a per-image basis, so running the check on
+    # any one pod is sufficient.
+    new_node = new_nodes[0]
+    pod_name = new_node.metadata.name
+
+    runner = command_runner.KubernetesCommandRunner(
+        ((namespace, context), pod_name))
+    logger.info(f'{"-"*20}Start: Check user privilege in pod {pod_name!r} '
+                f'{"-"*20}')
+
+    def _run_privilege_check():
         rc, stdout, stderr = runner.run(check_k8s_user_sudo_cmd,
                                         require_outputs=True,
                                         separate_stderr=True,
                                         stream_logs=False)
         _raise_command_running_error('check user privilege',
-                                     check_k8s_user_sudo_cmd,
-                                     new_node.metadata.name, rc,
+                                     check_k8s_user_sudo_cmd, pod_name, rc,
                                      stdout + stderr)
-        if stdout == str(exceptions.INSUFFICIENT_PRIVILEGES_CODE):
-            raise config_lib.KubernetesError(
-                'Insufficient system privileges detected. '
-                'Ensure the default user has root access or '
-                '"sudo" is installed and the user is added to the sudoers '
-                'from the image.')
+        return stdout
+
+    stdout = _run_function_with_retries(
+        _run_privilege_check, f'check user privilege in pod {pod_name!r}')
+
+    if stdout == str(exceptions.INSUFFICIENT_PRIVILEGES_CODE):
+        raise config_lib.KubernetesError(
+            'Insufficient system privileges detected. '
+            'Ensure the default user has root access or '
+            '"sudo" is installed and the user is added to the sudoers '
+            'from the image.')
+    logger.info(f'{"-"*20}End: Check user privilege in pod {pod_name!r} '
+                f'{"-"*20}')
 
 
 def _setup_ssh_in_pods(namespace: str, context: Optional[str],
@@ -405,14 +461,19 @@ def _setup_ssh_in_pods(namespace: str, context: Optional[str],
         runner = command_runner.KubernetesCommandRunner(
             ((namespace, context), pod_name))
         logger.info(f'{"-"*20}Start: Set up SSH in pod {pod_name!r} {"-"*20}')
-        rc, stdout, _ = runner.run(set_k8s_ssh_cmd,
-                                   require_outputs=True,
-                                   stream_logs=False)
-        _raise_command_running_error('setup ssh', set_k8s_ssh_cmd, pod_name, rc,
-                                     stdout)
+
+        def _run_ssh_setup():
+            rc, stdout, _ = runner.run(set_k8s_ssh_cmd,
+                                       require_outputs=True,
+                                       stream_logs=False)
+            _raise_command_running_error('setup ssh', set_k8s_ssh_cmd, pod_name,
+                                         rc, stdout)
+
+        _run_function_with_retries(_run_ssh_setup,
+                                   f'setup ssh in pod {pod_name!r}')
         logger.info(f'{"-"*20}End: Set up SSH in pod {pod_name!r} {"-"*20}')
 
-    subprocess_utils.run_in_parallel(_setup_ssh_thread, new_nodes)
+    subprocess_utils.run_in_parallel(_setup_ssh_thread, new_nodes, NUM_THREADS)
 
 
 def _label_pod(namespace: str, context: Optional[str], pod_name: str,
@@ -765,12 +826,17 @@ def terminate_instances(
     def _is_head(pod) -> bool:
        return pod.metadata.labels[constants.TAG_RAY_NODE_KIND] == 'head'
 
-    for pod_name, pod in pods.items():
-        logger.debug(f'Terminating instance {pod_name}: {pod}')
+    def _terminate_pod_thread(pod_info):
+        pod_name, pod = pod_info
         if _is_head(pod) and worker_only:
-            continue
+            return
+        logger.debug(f'Terminating instance {pod_name}: {pod}')
         _terminate_node(namespace, context, pod_name)
 
+    # Run pod termination in parallel
+    subprocess_utils.run_in_parallel(_terminate_pod_thread, pods.items(),
+                                     NUM_THREADS)
+
 
 def get_cluster_info(
     region: str,
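
All of the per-pod setup steps above funnel through the new _run_function_with_retries helper and run across pods in parallel. A condensed, self-contained sketch of the retry pattern it implements (the exception type and the flaky operation are simulated here):

    import time

    class KubernetesError(Exception):
        """Stand-in for config_lib.KubernetesError."""

    def run_with_retries(func, operation_name, max_retries=3, retry_delay=5):
        for attempt in range(max_retries + 1):
            try:
                return func()
            except KubernetesError:
                if attempt == max_retries:
                    raise
                print(f'Failed to {operation_name} - retrying in {retry_delay}s.')
                time.sleep(retry_delay)

    attempts = {'n': 0}

    def flaky_setup():
        # Fails twice with a transient error, then succeeds.
        attempts['n'] += 1
        if attempts['n'] < 3:
            raise KubernetesError('transient API error')
        return 'ok'

    print(run_with_retries(flaky_setup, 'set env vars in pod demo-pod'))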
sky/templates/aws-ray.yml.j2 CHANGED
@@ -122,6 +122,15 @@ available_node_types:
           - path: /etc/apt/apt.conf.d/10cloudinit-disable
             content: |
               APT::Periodic::Enable "0";
+          - path: /etc/apt/apt.conf.d/52unattended-upgrades-local
+            content: |
+              Unattended-Upgrade::DevRelease "false";
+              Unattended-Upgrade::Allowed-Origins {};
+        bootcmd:
+          - systemctl stop apt-daily.timer apt-daily-upgrade.timer unattended-upgrades.service
+          - systemctl disable apt-daily.timer apt-daily-upgrade.timer unattended-upgrades.service
+          - systemctl mask apt-daily.service apt-daily-upgrade.service unattended-upgrades.service
+          - systemctl daemon-reload
       TagSpecifications:
         - ResourceType: instance
           Tags:
sky/utils/command_runner.py CHANGED
@@ -237,6 +237,23 @@ class CommandRunner:
             rsync_command.append(prefix_command)
         rsync_command += ['rsync', RSYNC_DISPLAY_OPTION]
 
+        def _get_remote_home_dir_with_retry():
+            backoff = common_utils.Backoff(initial_backoff=1,
+                                           max_backoff_factor=5)
+            retries_left = max_retry
+            assert retries_left > 0, f'max_retry {max_retry} must be positive.'
+            while retries_left >= 0:
+                try:
+                    return get_remote_home_dir()
+                except Exception:  # pylint: disable=broad-except
+                    if retries_left == 0:
+                        raise
+                    sleep_time = backoff.current_backoff()
+                    logger.warning(f'Failed to get remote home dir '
+                                   f'- retrying in {sleep_time} seconds.')
+                    retries_left -= 1
+                    time.sleep(sleep_time)
+
         # --filter
         # The source is a local path, so we need to resolve it.
         resolved_source = pathlib.Path(source).expanduser().resolve()
@@ -261,7 +278,7 @@ class CommandRunner:
         if up:
             resolved_target = target
             if target.startswith('~'):
-                remote_home_dir = get_remote_home_dir()
+                remote_home_dir = _get_remote_home_dir_with_retry()
                 resolved_target = target.replace('~', remote_home_dir)
         full_source_str = str(resolved_source)
         if resolved_source.is_dir():
@@ -273,7 +290,7 @@ class CommandRunner:
         else:
             resolved_source = source
             if source.startswith('~'):
-                remote_home_dir = get_remote_home_dir()
+                remote_home_dir = _get_remote_home_dir_with_retry()
                 resolved_source = source.replace('~', remote_home_dir)
             rsync_command.extend([
                 f'{node_destination}:{resolved_source!r}',
@@ -656,6 +673,8 @@ class SSHCommandRunner(CommandRunner):
 class KubernetesCommandRunner(CommandRunner):
     """Runner for Kubernetes commands."""
 
+    _MAX_RETRIES_FOR_RSYNC = 3
+
     def __init__(
         self,
         node: Tuple[Tuple[str, Optional[str]], str],
@@ -798,7 +817,7 @@ class KubernetesCommandRunner(CommandRunner):
         # Advanced options.
         log_path: str = os.devnull,
         stream_logs: bool = True,
-        max_retry: int = 1,
+        max_retry: int = _MAX_RETRIES_FOR_RSYNC,
     ) -> None:
         """Uses 'rsync' to sync 'source' to 'target'.
 
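
_get_remote_home_dir_with_retry pairs a bounded retry count with SkyPilot's common_utils.Backoff helper for exponentially growing sleeps. A standalone sketch of the same idea (the helper below is illustrative, not SkyPilot's exact implementation):

    import random
    import time

    def retry_with_backoff(fn, max_retries=3, initial_backoff=1.0, max_factor=5):
        """Call fn(), retrying failures with exponentially growing sleeps."""
        backoff = initial_backoff
        for attempt in range(max_retries + 1):
            try:
                return fn()
            except Exception:  # broad by design, mirroring the original
                if attempt == max_retries:
                    raise
                # Cap growth at initial_backoff * max_factor; add a little jitter.
                sleep = min(backoff, initial_backoff * max_factor)
                time.sleep(sleep * random.uniform(0.8, 1.2))
                backoff *= 2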
sky/utils/subprocess_utils.py CHANGED
@@ -50,17 +50,27 @@ def get_parallel_threads() -> int:
     return max(4, cpu_count - 1)
 
 
-def run_in_parallel(func: Callable, args: Iterable[Any]) -> List[Any]:
+def run_in_parallel(func: Callable,
+                    args: Iterable[Any],
+                    num_threads: Optional[int] = None) -> List[Any]:
     """Run a function in parallel on a list of arguments.
 
     The function 'func' should raise a CommandError if the command fails.
 
+    Args:
+        func: The function to run in parallel
+        args: Iterable of arguments to pass to func
+        num_threads: Number of threads to use. If None, uses
+          get_parallel_threads()
+
     Returns:
         A list of the return values of the function func, in the same order as the
         arguments.
     """
     # Reference: https://stackoverflow.com/questions/25790279/python-multiprocessing-early-termination  # pylint: disable=line-too-long
-    with pool.ThreadPool(processes=get_parallel_threads()) as p:
+    processes = num_threads if num_threads is not None else get_parallel_threads(
+    )
+    with pool.ThreadPool(processes=processes) as p:
         # Run the function in parallel on the arguments, keeping the order.
         return list(p.imap(func, args))
 
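
Callers can now size the thread pool explicitly, which is how the Kubernetes provisioner passes its NUM_THREADS. A usage sketch (the check function and host names are hypothetical):

    from sky.utils import subprocess_utils

    def check(host: str) -> str:
        # Placeholder for real per-host work (e.g. a command run in a pod).
        return f'{host}: ok'

    hosts = ['pod-0', 'pod-1', 'pod-2']
    # num_threads overrides the default get_parallel_threads() sizing.
    results = subprocess_utils.run_in_parallel(check, hosts, num_threads=8)
    print(results)  # ['pod-0: ok', 'pod-1: ok', 'pod-2: ok']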
{skypilot_nightly-1.0.0.dev20241104.dist-info → skypilot_nightly-1.0.0.dev20241106.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: skypilot-nightly
-Version: 1.0.0.dev20241104
+Version: 1.0.0.dev20241106
 Summary: SkyPilot: An intercloud broker for the clouds
 Author: SkyPilot Team
 License: Apache 2.0
{skypilot_nightly-1.0.0.dev20241104.dist-info → skypilot_nightly-1.0.0.dev20241106.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-sky/__init__.py,sha256=SY7a4jJlsaQsS8FjWQRJvBp2nd_B86PDQ9z3qFAHBsg,5882
+sky/__init__.py,sha256=gJi4nCnW9_tfOdSmOh1s0EemDMl3aeTk1lG8K9lrsHA,5882
 sky/admin_policy.py,sha256=hPo02f_A32gCqhUueF0QYy1fMSSKqRwYEg_9FxScN_s,3248
 sky/authentication.py,sha256=pAdCT60OxxiXI9KXDyP2lQ9u9vMc6aMtq5Xi2h_hbdw,20984
 sky/check.py,sha256=D3Y3saIFAYVvPxuBHnVgJEO0fUVDxgjwuMBaO-D778k,9472
@@ -31,7 +31,7 @@ sky/adaptors/vsphere.py,sha256=zJP9SeObEoLrpgHW2VHvZE48EhgVf8GfAEIwBeaDMfM,2129
 sky/backends/__init__.py,sha256=UDjwbUgpTRApbPJnNfR786GadUuwgRk3vsWoVu5RB_c,536
 sky/backends/backend.py,sha256=wwfbrxPhjMPs6PSyy3tAHI8WJhl-xhgzWBsAZjmJJ6g,6249
 sky/backends/backend_utils.py,sha256=2myfryj1zG9xxPaX6XYYJruxAOGNGbpsy2ckT4A77sE,121813
-sky/backends/cloud_vm_ray_backend.py,sha256=jdG17FDAOUoHjXib2P73Hhdl9yXoDJxPTY5Dyqvp6j4,232757
+sky/backends/cloud_vm_ray_backend.py,sha256=jlX1atSF4L31ZMzC_tnBaWnxvc2Wb8DRwt5G_ukrlJk,232799
 sky/backends/docker_utils.py,sha256=Hyw1YY20EyghhEbYx6O2FIMDcGkNzBzV9TM7LFynei8,8358
 sky/backends/local_docker_backend.py,sha256=0JL5m0YUgOmOL4aWEUe4tmt89dsxjk4_WXkPwgEKEis,16801
 sky/backends/wheel_utils.py,sha256=3QS4T_Ydvo4DbYhogtyADyNBEf04I6jUCL71M285shQ,7963
@@ -40,8 +40,8 @@ sky/benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sky/benchmark/benchmark_state.py,sha256=X8CXmuU9KgsDRhKedhFgjeRMUFWtQsjFs1qECvPG2yg,8723
 sky/benchmark/benchmark_utils.py,sha256=eb-i6zYoo-Zkod-T9qtCu1FcYLw--Yyos1SyibUPZNE,26194
 sky/clouds/__init__.py,sha256=WuNIJEnZmBO72tU5awgaaL3rdvFRSkgaYNNeuY68dXo,1356
-sky/clouds/aws.py,sha256=dVZ8auaa2z2Ifl9iiRT06IeEFaNtZhANKtHVLT6Gcno,49474
-sky/clouds/azure.py,sha256=ixw5jCnnMxDLj0hpArljVzq88EKOrqRxk9xm5N9u-mc,30576
+sky/clouds/aws.py,sha256=2STW4eaCEtxre96yVagUcewNHiYGmxHKITNEQvgBmww,49539
+sky/clouds/azure.py,sha256=38eUcB1_lt5FvDWo-G_pKIIsT1c_bCU2AifEYo7KX9Y,30756
 sky/clouds/cloud.py,sha256=A5F4a71ciPyljWEs6vT-4RmdGT-AE9NkhS8gJ4Vgi_I,35165
 sky/clouds/cloud_registry.py,sha256=oLoYFjm_SDTgdHokY7b6A5Utq80HXRQNxV0fLjDdVsQ,2361
 sky/clouds/cudo.py,sha256=UiY273Sln7VOYDYx93yWiWH_RLlOKZ2cm7mA31ld4A8,13094
@@ -100,14 +100,14 @@ sky/jobs/core.py,sha256=w7PancHi8_-afLKZQ3HHMD1sEDoepm1vEMxyDlXdo64,17155
 sky/jobs/recovery_strategy.py,sha256=FpPK6e2PT61cZPDUJqIfo6g53uSRTBh7dOTbfR1DLVE,26672
 sky/jobs/state.py,sha256=exN6BdJlLBzFTccJCSHN4dNjVeYFgTgqgxOaHwLw2IQ,24307
 sky/jobs/utils.py,sha256=pF4Kyl4v1M_Bmm2jIRlXGTSdII5BJ3f4qwex_oCFgBk,37742
-sky/jobs/dashboard/dashboard.py,sha256=HFShuaxKir97QTeK2x37h6bsY6ncaFaNEg1USZqJPdc,3050
+sky/jobs/dashboard/dashboard.py,sha256=FXVQAWjAuQQTfAGlTCD-Xb9LckC5I4NhGwiBZy8Avo8,3186
 sky/jobs/dashboard/static/favicon.ico,sha256=uYlvgxSM7gjBmXpZ8wydvZUPAbJiiix-rc2Xe5mma9s,15086
-sky/jobs/dashboard/templates/index.html,sha256=DBKMYEkkJ6sgLYod9ro7drgL8Y_neDsCx_WbwhWDsWM,9837
+sky/jobs/dashboard/templates/index.html,sha256=su1tqgcsXNl1lGl9hfIR6ig1f531OO57x1Tc2mNDK7U,11139
 sky/provision/__init__.py,sha256=UhYsGRribEyK1--PPT0Dom9051jlpdn8UCNhO8qpPOc,6262
 sky/provision/common.py,sha256=E8AlSUFcn0FYQq1erNmoVfMAdsF9tP2yxfyk-9PLvQU,10286
 sky/provision/constants.py,sha256=oc_XDUkcoLQ_lwDy5yMeMSWviKS0j0s1c0pjlvpNeWY,800
 sky/provision/docker_utils.py,sha256=cKYasCwbMf6C2_0vTxg2GvbrnhFvko-xDl1frfm7wxc,19199
-sky/provision/instance_setup.py,sha256=n1Px_KOYZl7Rf1WLXrfTTHyqxyA8_5QTN9BNLjQRkgc,22427
+sky/provision/instance_setup.py,sha256=c6i_NC6GrW4hXAQIU5_dUBbnThjZQNS3cL2M6yMtzes,23616
 sky/provision/logging.py,sha256=yZWgejrFBhhRjAtvFu5N5bRXIMK5TuwNjp1vKQqz2pw,2103
 sky/provision/metadata_utils.py,sha256=LrxeV4wD2QPzNdXV_npj8q-pr35FatxBBjF_jSbpOT0,4013
 sky/provision/provisioner.py,sha256=mTvtBjS-Xz64LJcyeHx_-wdM8Gin8D49YRaV_TADaz4,25334
@@ -137,7 +137,7 @@ sky/provision/gcp/instance_utils.py,sha256=veRBr6Oziv0KaUdC4acuWeaOremNV0gMYCCHa
 sky/provision/gcp/mig_utils.py,sha256=oFpcFZoapHMILSE4iIm8V5bxP1RhbMHRF7cciqq8qAk,7883
 sky/provision/kubernetes/__init__.py,sha256=y6yVfii81WYG3ROxv4hiIj-ydinS5-xGxLvXnARVQoI,719
 sky/provision/kubernetes/config.py,sha256=WEKcFXXhe89bLGAvoMiBvTDxdxkpTIA6ezrj2vmzldc,29072
-sky/provision/kubernetes/instance.py,sha256=1dN2vdh-ZdeIe39ZxH5DAnnc8kXHWpzD6q-f14-8cDE,41576
+sky/provision/kubernetes/instance.py,sha256=rY43hZOInP20kYofW0MGs7wDbJ4NxMw1FtKAJAPGIOU,43960
 sky/provision/kubernetes/network.py,sha256=EpNjRQ131CXepqbdkoRKFu4szVrm0oKEpv1l8EgOkjU,12364
 sky/provision/kubernetes/network_utils.py,sha256=t1FS3K400fetH7cBuRgQJZl5_jEeMshsvsYmnMUcq8k,11399
 sky/provision/kubernetes/utils.py,sha256=2N5c4yA7CEn4DjvCiUO73W4XDEjgixcJRVdgs913QQE,89523
@@ -218,7 +218,7 @@ sky/skylet/ray_patches/log_monitor.py.patch,sha256=CPoh3U_ogOHrkMOK7jaIRnwdzxjBT
 sky/skylet/ray_patches/resource_demand_scheduler.py.patch,sha256=AVV-Hw-Rxw16aFm4VsyzayX1QOvwmQuM79iVdSjkSl4,658
 sky/skylet/ray_patches/updater.py.patch,sha256=ZNMGVYICPBB44jLbEx2KvCgIY7BWYdDv3-2b2HJWmAQ,289
 sky/skylet/ray_patches/worker.py.patch,sha256=_OBhibdr3xOy5Qje6Tt8D1eQVm_msi50TJbCJmOTxVU,565
-sky/templates/aws-ray.yml.j2,sha256=K0rAuyf1XC_GPFp1BR9df42-Be12A6T2UF0BllVSpYg,8005
+sky/templates/aws-ray.yml.j2,sha256=fJUwkgXwkuackZI3UD7Fum4iJpkZttl6Jwy3MtYqL1I,8547
 sky/templates/azure-ray.yml.j2,sha256=uUneIfT5vTLUCvrZXiv2dsd3gFqLH2FK632oBruOO_k,6237
 sky/templates/cudo-ray.yml.j2,sha256=SEHVY57iBauCOE2HYJtYVFEKlriAkdwQu_p86a1n_bA,3548
 sky/templates/fluidstack-ray.yml.j2,sha256=t8TCULgiErCZdtFmBZVsA8ZdcqR7ccwsmQhuDFTBEAU,3541
@@ -245,7 +245,7 @@ sky/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sky/utils/accelerator_registry.py,sha256=BO4iYH5bV80Xyp4EPfO0n1D3LL0FvESCy7xm59Je3_o,3798
 sky/utils/admin_policy_utils.py,sha256=zFCu1OFIrZRfQNY0JFRO1502WFfdqZhwAU_QgM4fO9U,5943
 sky/utils/cluster_yaml_utils.py,sha256=1wRRYqI1kI-eFs1pMW4r_FFjHJ0zamq6v2RRI-Gtx5E,849
-sky/utils/command_runner.py,sha256=3CDcqRXEmoe3C-t2P58McgcRg6p9m5haUWYj1rOLuqM,34858
+sky/utils/command_runner.py,sha256=seU7uX9CrxiC8WOWBKHW94m67-V6DYghqRXhYdUIdQI,35756
 sky/utils/command_runner.pyi,sha256=mJOzCgcYZAfHwnY_6Wf1YwlTEJGb9ihzc2f0rE0Kw98,7751
 sky/utils/common_utils.py,sha256=Qy25LuIoTT0qg391EWyT9i5D6fwk1S4OdFwRpCTZ9Vk,24657
 sky/utils/controller_utils.py,sha256=wF4_y1PCsLAWoo3XEtECwkNYTN6hO3vn_cxGxgQYcd8,43268
@@ -257,7 +257,7 @@ sky/utils/log_utils.py,sha256=ptv2sbsiJSgk4NvdccrMsUR-MvOKnbu4BQiRSishgk0,12472
 sky/utils/resources_utils.py,sha256=Xqi7gxPYw2y5wl5okUI5zx5LEij0hJF_V3Zi8q7TXYg,7890
 sky/utils/rich_utils.py,sha256=hmnI1X5dKvRIQzB7EyNb34FT97qFNve-0QHqM5r0mVk,3066
 sky/utils/schemas.py,sha256=mogoStpQ77S936VfChinAW2I1DT4q2c5E7qY_qNiO0w,29094
-sky/utils/subprocess_utils.py,sha256=3R54Elc2n8DQeO6Y8MCDJ6N6v27HDGpbNMIfCquqXYQ,6552
+sky/utils/subprocess_utils.py,sha256=mMFCTfxbyav5LJ1epJJXkgfFYmd828naTOMVfYjuEWY,6905
 sky/utils/timeline.py,sha256=ao_nm0y52ZQILfL7Y92c3pSEFRyPm_ElORC3DrI5BwQ,3936
 sky/utils/ux_utils.py,sha256=CqyIFGDuSE8fQasPkna_loZMwtboC9KedR09WEQ7qz0,6502
 sky/utils/validator.py,sha256=cAFERCoC7jH0DFKepcU4x9SYmdrYL1iVmW9tXA18hvo,701
@@ -274,9 +274,9 @@ sky/utils/kubernetes/k8s_gpu_labeler_job.yaml,sha256=k0TBoQ4zgf79-sVkixKSGYFHQ7Z
 sky/utils/kubernetes/k8s_gpu_labeler_setup.yaml,sha256=VLKT2KKimZu1GDg_4AIlIt488oMQvhRZWwsj9vBbPUg,3812
 sky/utils/kubernetes/rsync_helper.sh,sha256=hyYDaYSNxYaNvzUQBzC8AidB7nDeojizjkzc_CTxycY,1077
 sky/utils/kubernetes/ssh_jump_lifecycle_manager.py,sha256=RFLJ3k7MR5UN4SKHykQ0lV9SgXumoULpKYIAt1vh-HU,6560
-skypilot_nightly-1.0.0.dev20241104.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
-skypilot_nightly-1.0.0.dev20241104.dist-info/METADATA,sha256=7kv9Ztl-Bs4moW_9zTW_V_3-xlRlklT3-vd6U0wI55o,19708
-skypilot_nightly-1.0.0.dev20241104.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-skypilot_nightly-1.0.0.dev20241104.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
-skypilot_nightly-1.0.0.dev20241104.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
-skypilot_nightly-1.0.0.dev20241104.dist-info/RECORD,,
+skypilot_nightly-1.0.0.dev20241106.dist-info/LICENSE,sha256=emRJAvE7ngL6x0RhQvlns5wJzGI3NEQ_WMjNmd9TZc4,12170
+skypilot_nightly-1.0.0.dev20241106.dist-info/METADATA,sha256=xDHkghCeZl-VGNYV5hps-0I-il3EKeUO9Rg7JcokqPI,19708
+skypilot_nightly-1.0.0.dev20241106.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+skypilot_nightly-1.0.0.dev20241106.dist-info/entry_points.txt,sha256=StA6HYpuHj-Y61L2Ze-hK2IcLWgLZcML5gJu8cs6nU4,36
+skypilot_nightly-1.0.0.dev20241106.dist-info/top_level.txt,sha256=qA8QuiNNb6Y1OF-pCUtPEr6sLEwy2xJX06Bd_CrtrHY,4
+skypilot_nightly-1.0.0.dev20241106.dist-info/RECORD,,