skypilot-nightly 1.0.0.dev20250604__py3-none-any.whl → 1.0.0.dev20250605__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. sky/__init__.py +2 -2
  2. sky/admin_policy.py +5 -0
  3. sky/catalog/__init__.py +2 -2
  4. sky/catalog/common.py +7 -9
  5. sky/cli.py +11 -9
  6. sky/client/cli.py +11 -9
  7. sky/client/sdk.py +30 -12
  8. sky/dashboard/out/404.html +1 -1
  9. sky/dashboard/out/_next/static/chunks/614-635a84e87800f99e.js +66 -0
  10. sky/dashboard/out/_next/static/chunks/{856-f1b1f7f47edde2e8.js → 856-3a32da4b84176f6d.js} +1 -1
  11. sky/dashboard/out/_next/static/chunks/937.3759f538f11a0953.js +1 -0
  12. sky/dashboard/out/_next/static/chunks/pages/config-1a1eeb949dab8897.js +6 -0
  13. sky/dashboard/out/_next/static/chunks/pages/users-262aab38b9baaf3a.js +16 -0
  14. sky/dashboard/out/_next/static/chunks/pages/workspaces-384ea5fa0cea8f28.js +1 -0
  15. sky/dashboard/out/_next/static/chunks/{webpack-f27c9a32aa3d9c6d.js → webpack-65d465f948974c0d.js} +1 -1
  16. sky/dashboard/out/_next/static/css/667d941a2888ce6e.css +3 -0
  17. sky/dashboard/out/_next/static/qjhIe-yC6nHcLKBqpzO1M/_buildManifest.js +1 -0
  18. sky/dashboard/out/clusters/[cluster]/[job].html +1 -1
  19. sky/dashboard/out/clusters/[cluster].html +1 -1
  20. sky/dashboard/out/clusters.html +1 -1
  21. sky/dashboard/out/config.html +1 -1
  22. sky/dashboard/out/index.html +1 -1
  23. sky/dashboard/out/infra/[context].html +1 -1
  24. sky/dashboard/out/infra.html +1 -1
  25. sky/dashboard/out/jobs/[job].html +1 -1
  26. sky/dashboard/out/jobs.html +1 -1
  27. sky/dashboard/out/users.html +1 -1
  28. sky/dashboard/out/workspace/new.html +1 -1
  29. sky/dashboard/out/workspaces/[name].html +1 -1
  30. sky/dashboard/out/workspaces.html +1 -1
  31. sky/execution.py +44 -46
  32. sky/global_user_state.py +118 -83
  33. sky/jobs/client/sdk.py +4 -1
  34. sky/jobs/server/core.py +5 -1
  35. sky/models.py +1 -0
  36. sky/resources.py +22 -1
  37. sky/server/constants.py +3 -1
  38. sky/server/requests/payloads.py +9 -0
  39. sky/server/server.py +30 -9
  40. sky/setup_files/MANIFEST.in +1 -0
  41. sky/setup_files/dependencies.py +2 -0
  42. sky/skylet/constants.py +10 -4
  43. sky/skypilot_config.py +4 -2
  44. sky/templates/websocket_proxy.py +11 -1
  45. sky/users/__init__.py +0 -0
  46. sky/users/model.conf +15 -0
  47. sky/users/permission.py +178 -0
  48. sky/users/rbac.py +86 -0
  49. sky/users/server.py +66 -0
  50. sky/utils/schemas.py +20 -7
  51. sky/workspaces/core.py +2 -2
  52. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/METADATA +3 -1
  53. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/RECORD +68 -64
  54. sky/catalog/constants.py +0 -8
  55. sky/dashboard/out/_next/static/chunks/614-3d29f98e0634b179.js +0 -66
  56. sky/dashboard/out/_next/static/chunks/937.f97f83652028e944.js +0 -1
  57. sky/dashboard/out/_next/static/chunks/pages/config-35383adcb0edb5e2.js +0 -6
  58. sky/dashboard/out/_next/static/chunks/pages/users-07b523ccb19317ad.js +0 -6
  59. sky/dashboard/out/_next/static/chunks/pages/workspaces-f54921ec9eb20965.js +0 -1
  60. sky/dashboard/out/_next/static/css/63d3995d8b528eb1.css +0 -3
  61. sky/dashboard/out/_next/static/vWwfD3jOky5J5jULHp8JT/_buildManifest.js +0 -1
  62. /sky/dashboard/out/_next/static/chunks/{121-8f55ee3fa6301784.js → 121-865d2bf8a3b84c6a.js} +0 -0
  63. /sky/dashboard/out/_next/static/chunks/{236-fef38aa6e5639300.js → 236-4c0dc6f63ccc6319.js} +0 -0
  64. /sky/dashboard/out/_next/static/chunks/{37-947904ccc5687bac.js → 37-beedd583fea84cc8.js} +0 -0
  65. /sky/dashboard/out/_next/static/chunks/{682-2be9b0f169727f2f.js → 682-6647f0417d5662f0.js} +0 -0
  66. /sky/dashboard/out/_next/static/chunks/{843-a097338acb89b7d7.js → 843-c296541442d4af88.js} +0 -0
  67. /sky/dashboard/out/_next/static/chunks/{969-d7b6fb7f602bfcb3.js → 969-c7abda31c10440ac.js} +0 -0
  68. /sky/dashboard/out/_next/static/chunks/pages/{_app-67925f5e6382e22f.js → _app-cb81dc4d27f4d009.js} +0 -0
  69. /sky/dashboard/out/_next/static/chunks/pages/clusters/[cluster]/{[job]-158b70da336d8607.js → [job]-65d04d5d77cbb6b6.js} +0 -0
  70. /sky/dashboard/out/_next/static/chunks/pages/clusters/{[cluster]-62c9982dc3675725.js → [cluster]-beabbcd7606c1a23.js} +0 -0
  71. /sky/dashboard/out/_next/static/chunks/pages/jobs/{[job]-a62a3c65dc9bc57c.js → [job]-86c47edc500f15f9.js} +0 -0
  72. /sky/dashboard/out/_next/static/{vWwfD3jOky5J5jULHp8JT → qjhIe-yC6nHcLKBqpzO1M}/_ssgManifest.js +0 -0
  73. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/WHEEL +0 -0
  74. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/entry_points.txt +0 -0
  75. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/licenses/LICENSE +0 -0
  76. {skypilot_nightly-1.0.0.dev20250604.dist-info → skypilot_nightly-1.0.0.dev20250605.dist-info}/top_level.txt +0 -0
sky/global_user_state.py CHANGED
@@ -44,18 +44,6 @@ logger = sky_logging.init_logger(__name__)
 
 _ENABLED_CLOUDS_KEY_PREFIX = 'enabled_clouds_'
 
-_DB_PATH = os.path.expanduser('~/.sky/state.db')
-pathlib.Path(_DB_PATH).parents[0].mkdir(parents=True, exist_ok=True)
-
-if os.environ.get(constants.SKYPILOT_API_SERVER_DB_URL_ENV_VAR):
-    # If SKYPILOT_API_SERVER_DB_URL_ENV_VAR is set, use it as the database URI.
-    logger.debug(
-        f'using db URI from {constants.SKYPILOT_API_SERVER_DB_URL_ENV_VAR}')
-    _SQLALCHEMY_ENGINE = sqlalchemy.create_engine(
-        os.environ.get(constants.SKYPILOT_API_SERVER_DB_URL_ENV_VAR))
-else:
-    _SQLALCHEMY_ENGINE = sqlalchemy.create_engine('sqlite:///' + _DB_PATH)
-
 Base = declarative.declarative_base()
 
 config_table = sqlalchemy.Table(
@@ -183,11 +171,11 @@ def create_table():
     # https://github.com/microsoft/WSL/issues/2395
     # TODO(romilb): We do not enable WAL for WSL because of known issue in WSL.
     # This may cause the database locked problem from WSL issue #1441.
-    if (_SQLALCHEMY_ENGINE.dialect.name
+    if (SQLALCHEMY_ENGINE.dialect.name
             == db_utils.SQLAlchemyDialect.SQLITE.value and
             not common_utils.is_wsl()):
         try:
-            with orm.Session(_SQLALCHEMY_ENGINE) as session:
+            with orm.Session(SQLALCHEMY_ENGINE) as session:
                 session.execute(sqlalchemy.text('PRAGMA journal_mode=WAL'))
                 session.commit()
         except sqlalchemy_exc.OperationalError as e:
@@ -197,12 +185,12 @@ def create_table():
     # is not critical and is likely to be enabled by other processes.
 
     # Create tables if they don't exist
-    Base.metadata.create_all(bind=_SQLALCHEMY_ENGINE)
+    Base.metadata.create_all(bind=SQLALCHEMY_ENGINE)
 
     # For backward compatibility.
     # TODO(zhwu): Remove this function after all users have migrated to
     # the latest version of SkyPilot.
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         # Add autostop column to clusters table
         db_utils.add_column_to_table_sqlalchemy(session,
                                                 'clusters',
@@ -309,34 +297,81 @@ def create_table():
         session.commit()
 
 
+conn_string = None
+if os.environ.get(constants.ENV_VAR_IS_SKYPILOT_SERVER) is not None:
+    conn_string = skypilot_config.get_nested(('db',), None)
+if conn_string:
+    logger.debug(f'using db URI from {conn_string}')
+    SQLALCHEMY_ENGINE = sqlalchemy.create_engine(conn_string)
+else:
+    _DB_PATH = os.path.expanduser('~/.sky/state.db')
+    pathlib.Path(_DB_PATH).parents[0].mkdir(parents=True, exist_ok=True)
+    SQLALCHEMY_ENGINE = sqlalchemy.create_engine('sqlite:///' + _DB_PATH)
 create_table()
 
 
-def add_or_update_user(user: models.User):
-    """Store the mapping from user hash to user name for display purposes."""
+def add_or_update_user(user: models.User) -> bool:
+    """Store the mapping from user hash to user name for display purposes.
+
+    Returns:
+        Boolean: whether the user is newly added
+    """
     if user.name is None:
-        return
+        return False
 
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
+            # For SQLite, use INSERT OR IGNORE followed by UPDATE to detect new
+            # vs existing
            insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+
+            # First try INSERT OR IGNORE - this won't fail if user exists
+            insert_stmnt = insert_func(user_table).prefix_with(
+                'OR IGNORE').values(id=user.id, name=user.name)
+            result = session.execute(insert_stmnt)
+
+            # Check if the INSERT actually inserted a row
+            was_inserted = result.rowcount > 0
+
+            if not was_inserted:
+                # User existed, so update it
+                session.query(user_table).filter_by(id=user.id).update(
+                    {user_table.c.name: user.name})
+
+            session.commit()
+            return was_inserted
+
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
              db_utils.SQLAlchemyDialect.POSTGRESQL.value):
+            # For PostgreSQL, use INSERT ... ON CONFLICT with RETURNING to
+            # detect insert vs update
            insert_func = postgresql.insert
+            insert_stmnt = insert_func(user_table).values(id=user.id,
+                                                          name=user.name)
+
+            # Use a sentinel in the RETURNING clause to detect insert vs update
+            upsert_stmnt = insert_stmnt.on_conflict_do_update(
+                index_elements=[user_table.c.id],
+                set_={
+                    user_table.c.name: user.name
+                }).returning(
+                    user_table.c.id,
+                    # This will be True for INSERT, False for UPDATE
+                    sqlalchemy.literal_column('(xmax = 0)').label('was_inserted'
+                                                                 ))
+
+            result = session.execute(upsert_stmnt)
+            session.commit()
+
+            row = result.fetchone()
+            return bool(row.was_inserted) if row else False
         else:
            raise ValueError('Unsupported database dialect')
-        insert_stmnt = insert_func(user_table).values(id=user.id,
-                                                      name=user.name)
-        do_update_stmt = insert_stmnt.on_conflict_do_update(
-            index_elements=[user_table.c.id],
-            set_={user_table.c.name: user.name})
-        session.execute(do_update_stmt)
-        session.commit()
 
 
 def get_user(user_id: str) -> models.User:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(user_table).filter_by(id=user_id).first()
         if row is None:
             return models.User(id=user_id)
@@ -344,7 +379,7 @@ def get_user(user_id: str) -> models.User:
 
 
 def get_all_users() -> List[models.User]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(user_table).all()
         return [models.User(id=row.id, name=row.name) for row in rows]
 
@@ -421,7 +456,7 @@ def add_or_update_cluster(cluster_name: str,
             'config_hash': config_hash,
         })
 
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         # with_for_update() locks the row until commit() or rollback()
         # is called, or until the code escapes the with block.
         cluster_row = session.query(cluster_table).filter_by(
@@ -448,10 +483,10 @@ def add_or_update_cluster(cluster_name: str,
                 'last_creation_command': last_use,
             })
 
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
              db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             insert_func = postgresql.insert
         else:
@@ -529,7 +564,7 @@ def _get_user_hash_or_current_user(user_hash: Optional[str]) -> str:
 def update_cluster_handle(cluster_name: str,
                           cluster_handle: 'backends.ResourceHandle'):
     handle = pickle.dumps(cluster_handle)
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         session.query(cluster_table).filter_by(name=cluster_name).update(
             {cluster_table.c.handle: handle})
         session.commit()
@@ -537,7 +572,7 @@ def update_cluster_handle(cluster_name: str,
 
 def update_last_use(cluster_name: str):
     """Updates the last used command for the cluster."""
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         session.query(cluster_table).filter_by(name=cluster_name).update(
             {cluster_table.c.last_use: common_utils.get_current_command()})
         session.commit()
@@ -548,7 +583,7 @@ def remove_cluster(cluster_name: str, terminate: bool) -> None:
     cluster_hash = _get_hash_for_existing_cluster(cluster_name)
     usage_intervals = _get_cluster_usage_intervals(cluster_hash)
 
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         # usage_intervals is not None and not empty
         if usage_intervals:
             assert cluster_hash is not None, cluster_name
@@ -581,7 +616,7 @@ def remove_cluster(cluster_name: str, terminate: bool) -> None:
 def get_handle_from_cluster_name(
         cluster_name: str) -> Optional['backends.ResourceHandle']:
     assert cluster_name is not None, 'cluster_name cannot be None'
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None:
             return None
@@ -590,12 +625,12 @@ def get_handle_from_cluster_name(
 
 def get_glob_cluster_names(cluster_name: str) -> List[str]:
     assert cluster_name is not None, 'cluster_name cannot be None'
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             rows = session.query(cluster_table).filter(
                 cluster_table.c.name.op('GLOB')(cluster_name)).all()
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
              db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             rows = session.query(cluster_table).filter(
                 cluster_table.c.name.op('SIMILAR TO')(
@@ -608,7 +643,7 @@ def get_glob_cluster_names(cluster_name: str) -> List[str]:
 def set_cluster_status(cluster_name: str,
                        status: status_lib.ClusterStatus) -> None:
     current_time = int(time.time())
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_table).filter_by(
             name=cluster_name).update({
                 cluster_table.c.status: status.value,
@@ -622,7 +657,7 @@ def set_cluster_status(cluster_name: str,
 
 def set_cluster_autostop_value(cluster_name: str, idle_minutes: int,
                                to_down: bool) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_table).filter_by(
             name=cluster_name).update({
                 cluster_table.c.autostop: idle_minutes,
@@ -635,7 +670,7 @@ def set_cluster_autostop_value(cluster_name: str, idle_minutes: int,
 
 
 def get_cluster_launch_time(cluster_name: str) -> Optional[int]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None or row.launched_at is None:
             return None
@@ -643,7 +678,7 @@ def get_cluster_launch_time(cluster_name: str) -> Optional[int]:
 
 
 def get_cluster_info(cluster_name: str) -> Optional[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None or row.metadata is None:
             return None
@@ -651,7 +686,7 @@ def get_cluster_info(cluster_name: str) -> Optional[Dict[str, Any]]:
 
 
 def set_cluster_info(cluster_name: str, metadata: Dict[str, Any]) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_table).filter_by(
             name=cluster_name).update(
                 {cluster_table.c.metadata: json.dumps(metadata)})
@@ -663,7 +698,7 @@ def set_cluster_info(cluster_name: str, metadata: Dict[str, Any]) -> None:
 
 def get_cluster_storage_mounts_metadata(
         cluster_name: str) -> Optional[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None or row.storage_mounts_metadata is None:
             return None
@@ -672,7 +707,7 @@ def get_cluster_storage_mounts_metadata(
 
 def set_cluster_storage_mounts_metadata(
         cluster_name: str, storage_mounts_metadata: Dict[str, Any]) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_table).filter_by(
             name=cluster_name).update({
                 cluster_table.c.storage_mounts_metadata:
@@ -689,7 +724,7 @@ def _get_cluster_usage_intervals(
 ) -> Optional[List[Tuple[int, Optional[int]]]]:
     if cluster_hash is None:
         return None
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_history_table).filter_by(
             cluster_hash=cluster_hash).first()
         if row is None or row.usage_intervals is None:
@@ -726,7 +761,7 @@ def _get_cluster_duration(cluster_hash: str) -> int:
 def _set_cluster_usage_intervals(
         cluster_hash: str, usage_intervals: List[Tuple[int,
                                                        Optional[int]]]) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_history_table).filter_by(
             cluster_hash=cluster_hash).update({
                 cluster_history_table.c.usage_intervals:
@@ -743,7 +778,7 @@ def set_owner_identity_for_cluster(cluster_name: str,
     if owner_identity is None:
         return
     owner_identity_str = json.dumps(owner_identity)
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(cluster_table).filter_by(
             name=cluster_name).update(
                 {cluster_table.c.owner: owner_identity_str})
@@ -754,7 +789,7 @@ def set_owner_identity_for_cluster(cluster_name: str,
 
 
 def _get_hash_for_existing_cluster(cluster_name: str) -> Optional[str]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None or row.cluster_hash is None:
             return None
@@ -763,7 +798,7 @@ def _get_hash_for_existing_cluster(cluster_name: str) -> Optional[str]:
 
 def get_launched_resources_from_cluster_hash(
         cluster_hash: str) -> Optional[Tuple[int, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_history_table).filter_by(
             cluster_hash=cluster_hash).first()
         if row is None:
@@ -808,7 +843,7 @@ def _load_storage_mounts_metadata(
 @context_utils.cancellation_guard
 def get_cluster_from_name(
         cluster_name: Optional[str]) -> Optional[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_table).filter_by(name=cluster_name).first()
         if row is None:
             return None
@@ -841,7 +876,7 @@ def get_cluster_from_name(
 
 
 def get_clusters() -> List[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(cluster_table).order_by(
             sqlalchemy.desc(cluster_table.c.launched_at)).all()
     records = []
@@ -876,7 +911,7 @@ def get_clusters() -> List[Dict[str, Any]]:
 
 
 def get_clusters_from_history() -> List[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(
             cluster_history_table.join(cluster_table,
                                        cluster_history_table.c.cluster_hash ==
@@ -912,7 +947,7 @@ def get_clusters_from_history() -> List[Dict[str, Any]]:
 
 
 def get_cluster_names_start_with(starts_with: str) -> List[str]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(cluster_table).filter(
             cluster_table.c.name.like(f'{starts_with}%')).all()
         return [row.name for row in rows]
@@ -920,7 +955,7 @@ def get_cluster_names_start_with(starts_with: str) -> List[str]:
 
 def get_cached_enabled_clouds(cloud_capability: 'cloud.CloudCapability',
                               workspace: str) -> List['clouds.Cloud']:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(config_table).filter_by(
             key=_get_enabled_clouds_key(cloud_capability, workspace)).first()
         ret = []
@@ -944,11 +979,11 @@ def get_cached_enabled_clouds(cloud_capability: 'cloud.CloudCapability',
 def set_enabled_clouds(enabled_clouds: List[str],
                        cloud_capability: 'cloud.CloudCapability',
                        workspace: str) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
             db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             insert_func = postgresql.insert
         else:
@@ -981,11 +1016,11 @@ def add_or_update_storage(storage_name: str,
     if not status_check(storage_status):
         raise ValueError(f'Error in updating global state. Storage Status '
                          f'{storage_status} is passed in incorrectly')
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
             db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             insert_func = postgresql.insert
         else:
@@ -1010,14 +1045,14 @@ def add_or_update_storage(storage_name: str,
 
 def remove_storage(storage_name: str):
     """Removes Storage from Database"""
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         session.query(storage_table).filter_by(name=storage_name).delete()
         session.commit()
 
 
 def set_storage_status(storage_name: str,
                        status: status_lib.StorageStatus) -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(storage_table).filter_by(
             name=storage_name).update({storage_table.c.status: status.value})
         session.commit()
@@ -1028,7 +1063,7 @@ def set_storage_status(storage_name: str,
 
 def get_storage_status(storage_name: str) -> Optional[status_lib.StorageStatus]:
     assert storage_name is not None, 'storage_name cannot be None'
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(storage_table).filter_by(name=storage_name).first()
         if row:
             return status_lib.StorageStatus[row.status]
@@ -1037,7 +1072,7 @@ def get_storage_status(storage_name: str) -> Optional[status_lib.StorageStatus]:
 
 def set_storage_handle(storage_name: str,
                        handle: 'Storage.StorageMetadata') -> None:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         count = session.query(storage_table).filter_by(
             name=storage_name).update(
                 {storage_table.c.handle: pickle.dumps(handle)})
@@ -1051,7 +1086,7 @@ def get_handle_from_storage_name(
         storage_name: Optional[str]) -> Optional['Storage.StorageMetadata']:
     if storage_name is None:
         return None
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(storage_table).filter_by(name=storage_name).first()
         if row:
             return pickle.loads(row.handle)
@@ -1060,12 +1095,12 @@ def get_handle_from_storage_name(
 
 def get_glob_storage_name(storage_name: str) -> List[str]:
     assert storage_name is not None, 'storage_name cannot be None'
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             rows = session.query(storage_table).filter(
                 storage_table.c.name.op('GLOB')(storage_name)).all()
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
             db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             rows = session.query(storage_table).filter(
                 storage_table.c.name.op('SIMILAR TO')(
@@ -1076,14 +1111,14 @@ def get_glob_storage_name(storage_name: str) -> List[str]:
 
 
 def get_storage_names_start_with(starts_with: str) -> List[str]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(storage_table).filter(
             storage_table.c.name.like(f'{starts_with}%')).all()
         return [row.name for row in rows]
 
 
 def get_storage() -> List[Dict[str, Any]]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         rows = session.query(storage_table).all()
         records = []
         for row in rows:
@@ -1099,7 +1134,7 @@ def get_storage() -> List[Dict[str, Any]]:
 
 
 def get_ssh_keys(user_hash: str) -> Tuple[str, str, bool]:
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(ssh_key_table).filter_by(
             user_hash=user_hash).first()
         if row:
@@ -1108,11 +1143,11 @@ def get_ssh_keys(user_hash: str) -> Tuple[str, str, bool]:
 
 
 def set_ssh_keys(user_hash: str, ssh_public_key: str, ssh_private_key: str):
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
             db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             insert_func = postgresql.insert
         else:
@@ -1142,7 +1177,7 @@ def get_cluster_yaml_str(cluster_yaml_path: Optional[str]) -> Optional[str]:
         raise ValueError('Attempted to read a None YAML.')
     cluster_file_name = os.path.basename(cluster_yaml_path)
     cluster_name, _ = os.path.splitext(cluster_file_name)
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         row = session.query(cluster_yaml_table).filter_by(
             cluster_name=cluster_name).first()
         if row is None:
@@ -1172,11 +1207,11 @@ def get_cluster_yaml_dict(cluster_yaml_path: Optional[str]) -> Dict[str, Any]:
 
 def set_cluster_yaml(cluster_name: str, yaml_str: str) -> None:
     """Set the cluster yaml in the database."""
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
-        if (_SQLALCHEMY_ENGINE.dialect.name ==
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
+        if (SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):
             insert_func = sqlite.insert
-        elif (_SQLALCHEMY_ENGINE.dialect.name ==
+        elif (SQLALCHEMY_ENGINE.dialect.name ==
            db_utils.SQLAlchemyDialect.POSTGRESQL.value):
             insert_func = postgresql.insert
         else:
@@ -1191,7 +1226,7 @@ def set_cluster_yaml(cluster_name: str, yaml_str: str) -> None:
 
 
 def remove_cluster_yaml(cluster_name: str):
-    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+    with orm.Session(SQLALCHEMY_ENGINE) as session:
         session.query(cluster_yaml_table).filter_by(
             cluster_name=cluster_name).delete()
         session.commit()
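
Note: besides the rename from the module-private `_SQLALCHEMY_ENGINE` to `SQLALCHEMY_ENGINE`, this file now picks the engine from the `db` entry of the SkyPilot config when the process runs as the API server (falling back to the SQLite file at `~/.sky/state.db`), and `add_or_update_user` now reports whether the user row was newly inserted. A minimal usage sketch of that new return value, assuming the 1.0.0.dev20250605 wheel is installed; the follow-up action printed here is illustrative only, not SkyPilot code:

```python
# Sketch: consume the boolean now returned by add_or_update_user().
from sky import global_user_state, models


def register_user(user_hash: str, display_name: str) -> None:
    user = models.User(id=user_hash, name=display_name)
    # True only when the row was newly inserted (SQLite: INSERT OR IGNORE took
    # effect; PostgreSQL: the RETURNING sentinel '(xmax = 0)' was true).
    is_new = global_user_state.add_or_update_user(user)
    if is_new:
        print(f'New user {display_name} ({user_hash}) added.')
    else:
        print(f'Updated display name for existing user {user_hash}.')
```

On PostgreSQL the insert-vs-update distinction comes from the `(xmax = 0)` sentinel in the RETURNING clause; on SQLite it is inferred from the row count of the `INSERT OR IGNORE`.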
sky/jobs/client/sdk.py CHANGED
@@ -92,7 +92,8 @@ def launch(
 @server_common.check_server_healthy_or_start
 def queue(refresh: bool,
           skip_finished: bool = False,
-          all_users: bool = False) -> server_common.RequestId:
+          all_users: bool = False,
+          job_ids: Optional[List[int]] = None) -> server_common.RequestId:
     """Gets statuses of managed jobs.
 
     Please refer to sky.cli.job_queue for documentation.
@@ -101,6 +102,7 @@ def queue(refresh: bool,
         refresh: Whether to restart the jobs controller if it is stopped.
         skip_finished: Whether to skip finished jobs.
         all_users: Whether to show all users' jobs.
+        job_ids: IDs of the managed jobs to show.
 
     Returns:
         The request ID of the queue request.
@@ -135,6 +137,7 @@ def queue(refresh: bool,
         refresh=refresh,
         skip_finished=skip_finished,
         all_users=all_users,
+        job_ids=job_ids,
     )
     response = requests.post(
         f'{server_common.get_server_url()}/jobs/queue',
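
The client-side `queue()` now accepts an optional `job_ids` filter that is forwarded in the request body. A hedged usage sketch, assuming the new wheel and a reachable API server; the request-ID/`sky.get()` pattern follows the existing SDK convention:

```python
# Usage sketch: fetch only managed jobs 3 and 7 instead of the full queue.
import sky
from sky import jobs

request_id = jobs.queue(refresh=False,
                        skip_finished=False,
                        all_users=False,
                        job_ids=[3, 7])
records = sky.get(request_id)  # blocks until the server returns the records
for job in records:
    print(job['job_id'], job.get('status'))
```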
sky/jobs/server/core.py CHANGED
@@ -378,7 +378,8 @@ def _maybe_restart_controller(
 @usage_lib.entrypoint
 def queue(refresh: bool,
           skip_finished: bool = False,
-          all_users: bool = False) -> List[Dict[str, Any]]:
+          all_users: bool = False,
+          job_ids: Optional[List[int]] = None) -> List[Dict[str, Any]]:
     # NOTE(dev): Keep the docstring consistent between the Python API and CLI.
     """Gets statuses of managed jobs.
 
@@ -450,6 +451,9 @@ def queue(refresh: bool,
         jobs = list(
             filter(lambda job: job['job_id'] in non_finished_job_ids, jobs))
 
+    if job_ids:
+        jobs = [job for job in jobs if job['job_id'] in job_ids]
+
     return jobs
 
 
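Server-side, the new argument is applied as a final filter over the assembled records. A self-contained sketch of that filtering semantics, with plain dicts standing in for the real managed-job records:

```python
# When job_ids is falsy (None or empty), the queue is returned unchanged;
# otherwise only matching job IDs survive.
from typing import Any, Dict, List, Optional


def filter_by_job_ids(jobs: List[Dict[str, Any]],
                      job_ids: Optional[List[int]]) -> List[Dict[str, Any]]:
    if job_ids:
        jobs = [job for job in jobs if job['job_id'] in job_ids]
    return jobs


queue = [{'job_id': 1, 'status': 'RUNNING'}, {'job_id': 2, 'status': 'FAILED'}]
assert filter_by_job_ids(queue, None) == queue
assert filter_by_job_ids(queue, [2]) == [{'job_id': 2, 'status': 'FAILED'}]
```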
sky/models.py CHANGED
@@ -7,6 +7,7 @@ from typing import Any, Dict, Optional
 
 @dataclasses.dataclass
 class User:
+    """Dataclass to store user information."""
     # User hash
     id: str
     # Display name of the user
sky/resources.py CHANGED
@@ -45,7 +45,9 @@ class AutostopConfig:
     # to be complete.
     enabled: bool
     # If enabled is False, these values are ignored.
-    idle_minutes: int = 5
+    # Keep the default value to 0 to make the behavior consistent with the CLI
+    # flags.
+    idle_minutes: int = 0
     down: bool = False
 
     def to_yaml_config(self) -> Union[Literal[False], Dict[str, Any]]:
@@ -883,6 +885,25 @@ class Resources:
                 valid_volumes.append(volume)
         self._volumes = valid_volumes
 
+    def override_autostop_config(self,
+                                 down: bool = False,
+                                 idle_minutes: Optional[int] = None) -> None:
+        """Override autostop config to the resource.
+
+        Args:
+            down: If true, override the autostop config to use autodown.
+            idle_minutes: If not None, override the idle minutes to autostop or
+                autodown.
+        """
+        if not down and idle_minutes is None:
+            return
+        if self._autostop_config is None:
+            self._autostop_config = AutostopConfig(enabled=True,)
+        if down:
+            self._autostop_config.down = down
+        if idle_minutes is not None:
+            self._autostop_config.idle_minutes = idle_minutes
+
     def is_launchable(self) -> bool:
         """Returns whether the resource is launchable."""
         return self.cloud is not None and self._instance_type is not None
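
`Resources.override_autostop_config()` lets callers such as the CLI's `--down`/`--idle-minutes-to-autostop` flags overwrite a task's autostop settings after the `Resources` object is built, and the `AutostopConfig.idle_minutes` default drops from 5 to 0 to match the CLI. A hedged, self-contained sketch of the override semantics using simplified stand-ins rather than the real `sky.resources` classes:

```python
# Stand-in classes mirroring the diff above; not the real sky.Resources.
import dataclasses
from typing import Optional


@dataclasses.dataclass
class AutostopConfig:
    enabled: bool
    # Default of 0 keeps behavior consistent with the CLI flags (was 5).
    idle_minutes: int = 0
    down: bool = False


class Resources:

    def __init__(self) -> None:
        self._autostop_config: Optional[AutostopConfig] = None

    def override_autostop_config(self,
                                 down: bool = False,
                                 idle_minutes: Optional[int] = None) -> None:
        # No-op when neither flag is given, so config from YAML wins.
        if not down and idle_minutes is None:
            return
        if self._autostop_config is None:
            self._autostop_config = AutostopConfig(enabled=True)
        if down:
            self._autostop_config.down = down
        if idle_minutes is not None:
            self._autostop_config.idle_minutes = idle_minutes


r = Resources()
r.override_autostop_config(down=True, idle_minutes=10)
assert r._autostop_config == AutostopConfig(enabled=True,
                                            idle_minutes=10,
                                            down=True)
```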
sky/server/constants.py CHANGED
@@ -7,7 +7,7 @@ from sky.skylet import constants
 # API server version, whenever there is a change in API server that requires a
 # restart of the local API server or error out when the client does not match
 # the server version.
-API_VERSION = '8'
+API_VERSION = '9'
 
 # Prefix for API request names.
 REQUEST_NAME_PREFIX = 'sky.'
@@ -25,8 +25,10 @@ API_SERVER_REQUEST_DB_PATH = '~/.sky/api_server/requests.db'
 CLUSTER_REFRESH_DAEMON_INTERVAL_SECONDS = 60
 
 # Environment variable for a file path to the API cookie file.
+# Keep in sync with websocket_proxy.py
 API_COOKIE_FILE_ENV_VAR = f'{constants.SKYPILOT_ENV_VAR_PREFIX}API_COOKIE_FILE'
 # Default file if unset.
+# Keep in sync with websocket_proxy.py
 API_COOKIE_FILE_DEFAULT_LOCATION = '~/.sky/cookies.txt'
 
 # The path to the dashboard build output
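
The two added comments flag that the cookie-file constants are duplicated in `sky/templates/websocket_proxy.py`. As a hedged illustration of how the pair fits together, the env var names an explicit cookie file and the default location is used otherwise; this resolution order is an assumption for illustration, not code from the wheel:

```python
# Sketch: resolve the API cookie file from the constants above.
import os

from sky.server import constants as server_constants


def resolve_cookie_path() -> str:
    path = os.environ.get(server_constants.API_COOKIE_FILE_ENV_VAR,
                          server_constants.API_COOKIE_FILE_DEFAULT_LOCATION)
    return os.path.expanduser(path)


print(resolve_cookie_path())
```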
sky/server/requests/payloads.py CHANGED
@@ -196,8 +196,10 @@ class LaunchBody(RequestBody):
     task: str
     cluster_name: str
     retry_until_up: bool = False
+    # TODO(aylei): remove this field in v0.12.0
     idle_minutes_to_autostop: Optional[int] = None
     dryrun: bool = False
+    # TODO(aylei): remove this field in v0.12.0
     down: bool = False
     backend: Optional[str] = None
     optimize_target: common_lib.OptimizeTarget = common_lib.OptimizeTarget.COST
@@ -331,6 +333,12 @@ class ClusterJobsDownloadLogsBody(RequestBody):
     local_dir: str = constants.SKY_LOGS_DIRECTORY
 
 
+class UserUpdateBody(RequestBody):
+    """The request body for the user update endpoint."""
+    user_id: str
+    role: str
+
+
 class DownloadBody(RequestBody):
     """The request body for the download endpoint."""
     folder_paths: List[str]
@@ -375,6 +383,7 @@ class JobsQueueBody(RequestBody):
     refresh: bool = False
     skip_finished: bool = False
     all_users: bool = False
+    job_ids: Optional[List[int]] = None
 
 
 class JobsCancelBody(RequestBody):
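
`JobsQueueBody` gains the same `job_ids` field the SDK and server now accept, and `UserUpdateBody` backs the new user-role endpoints added under `sky/users/`. A hedged sketch of building the extended queue payload, mirroring the client call shown in `sky/jobs/client/sdk.py` above; field values are illustrative, and it assumes the base `RequestBody` fields all carry defaults, as they do for client-built payloads:

```python
# Sketch: construct the extended /jobs/queue request body.
from sky.server.requests import payloads

body = payloads.JobsQueueBody(refresh=False,
                              skip_finished=True,
                              all_users=False,
                              job_ids=[3, 7])
print(body.job_ids)  # -> [3, 7]
```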