skypilot-nightly 1.0.0.dev20251021__py3-none-any.whl → 1.0.0.dev20251022__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sky/__init__.py +2 -2
- sky/adaptors/kubernetes.py +5 -2
- sky/dashboard/out/404.html +1 -1
- sky/dashboard/out/_next/static/IgACOQPupLbX9z-RYVEDx/_buildManifest.js +1 -0
- sky/dashboard/out/_next/static/chunks/1141-ec6f902ffb865853.js +11 -0
- sky/dashboard/out/_next/static/chunks/2755.9b1e69c921b5a870.js +26 -0
- sky/dashboard/out/_next/static/chunks/3015-d014dc5b9412fade.js +1 -0
- sky/dashboard/out/_next/static/chunks/{3294.1fafbf42b3bcebff.js → 3294.998db87cd52a1238.js} +1 -1
- sky/dashboard/out/_next/static/chunks/{3785.a19328ba41517b8b.js → 3785.483a3dda2d52f26e.js} +1 -1
- sky/dashboard/out/_next/static/chunks/{1121-d0782b9251f0fcd3.js → 4282-d2f3ef2fbf78e347.js} +1 -1
- sky/dashboard/out/_next/static/chunks/6856-5c94d394259cdb6e.js +1 -0
- sky/dashboard/out/_next/static/chunks/8969-0389e2cb52412db3.js +1 -0
- sky/dashboard/out/_next/static/chunks/9360.14326e329484b57e.js +31 -0
- sky/dashboard/out/_next/static/chunks/pages/clusters/[cluster]/{[job]-8f058b0346db2aff.js → [job]-602eeead010ec1d6.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/clusters/{[cluster]-477555ab7c0b13d8.js → [cluster]-18b334dedbd9f6f2.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{clusters-2f61f65487f6d8ff.js → clusters-57221ec2e4e01076.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/infra/{[context]-553b8b5cb65e100b.js → [context]-44ce535a0a0ad4ec.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{infra-910a22500c50596f.js → infra-872e6a00165534f4.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{jobs-a35a9dc3c5ccd657.js → jobs-0dc34cf9a8710a9f.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{users-98d2ed979084162a.js → users-3a543725492fb896.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{volumes-835d14ba94808f79.js → volumes-d2af9d22e87cc4ba.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/workspaces/{[name]-e8688c35c06f0ac5.js → [name]-9ad108cd67d16d96.js} +1 -1
- sky/dashboard/out/_next/static/chunks/pages/{workspaces-69c80d677d3c2949.js → workspaces-6fc994fa1ee6c6bf.js} +1 -1
- sky/dashboard/out/_next/static/chunks/webpack-919e3c01ab6b2633.js +1 -0
- sky/dashboard/out/clusters/[cluster]/[job].html +1 -1
- sky/dashboard/out/clusters/[cluster].html +1 -1
- sky/dashboard/out/clusters.html +1 -1
- sky/dashboard/out/config.html +1 -1
- sky/dashboard/out/index.html +1 -1
- sky/dashboard/out/infra/[context].html +1 -1
- sky/dashboard/out/infra.html +1 -1
- sky/dashboard/out/jobs/[job].html +1 -1
- sky/dashboard/out/jobs/pools/[pool].html +1 -1
- sky/dashboard/out/jobs.html +1 -1
- sky/dashboard/out/users.html +1 -1
- sky/dashboard/out/volumes.html +1 -1
- sky/dashboard/out/workspace/new.html +1 -1
- sky/dashboard/out/workspaces/[name].html +1 -1
- sky/dashboard/out/workspaces.html +1 -1
- sky/global_user_state.py +117 -17
- sky/jobs/constants.py +1 -1
- sky/jobs/server/core.py +4 -2
- sky/jobs/server/server.py +11 -11
- sky/jobs/state.py +307 -55
- sky/jobs/utils.py +248 -144
- sky/schemas/api/responses.py +2 -0
- sky/schemas/db/skypilot_config/001_initial_schema.py +30 -0
- sky/serve/server/server.py +7 -7
- sky/server/common.py +1 -13
- sky/server/requests/executor.py +20 -20
- sky/server/requests/payloads.py +3 -0
- sky/server/requests/requests.py +12 -19
- sky/server/requests/serializers/encoders.py +3 -3
- sky/server/server.py +34 -34
- sky/setup_files/alembic.ini +4 -0
- sky/skylet/services.py +5 -5
- sky/skypilot_config.py +87 -75
- sky/ssh_node_pools/server.py +4 -4
- sky/users/permission.py +4 -0
- sky/utils/db/db_utils.py +11 -3
- sky/utils/db/migration_utils.py +7 -3
- sky/volumes/server/server.py +3 -3
- sky/workspaces/server.py +6 -6
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/METADATA +36 -35
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/RECORD +73 -72
- sky/dashboard/out/_next/static/chunks/1141-3b40c39626f99c89.js +0 -11
- sky/dashboard/out/_next/static/chunks/2755.97300e1362fe7c98.js +0 -26
- sky/dashboard/out/_next/static/chunks/3015-7e0e8f06bb2f881c.js +0 -1
- sky/dashboard/out/_next/static/chunks/6856-5fdc9b851a18acdb.js +0 -1
- sky/dashboard/out/_next/static/chunks/8969-66237729cdf9749e.js +0 -1
- sky/dashboard/out/_next/static/chunks/9360.71e83b2ddc844ec2.js +0 -31
- sky/dashboard/out/_next/static/chunks/webpack-66f23594d38c7f16.js +0 -1
- sky/dashboard/out/_next/static/jDc1PlRsl9Cc5FQUMLBu8/_buildManifest.js +0 -1
- /sky/dashboard/out/_next/static/{jDc1PlRsl9Cc5FQUMLBu8 → IgACOQPupLbX9z-RYVEDx}/_ssgManifest.js +0 -0
- /sky/dashboard/out/_next/static/chunks/{1871-49141c317f3a9020.js → 1871-df9f87fcb7f24292.js} +0 -0
- /sky/dashboard/out/_next/static/chunks/pages/jobs/{[job]-e5c9ce6a24fc0de4.js → [job]-8677af16befde039.js} +0 -0
- /sky/dashboard/out/_next/static/chunks/pages/jobs/pools/{[pool]-bc979970c247d8f3.js → [pool]-e020fd69dbe76cea.js} +0 -0
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/WHEEL +0 -0
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/entry_points.txt +0 -0
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/licenses/LICENSE +0 -0
- {skypilot_nightly-1.0.0.dev20251021.dist-info → skypilot_nightly-1.0.0.dev20251022.dist-info}/top_level.txt +0 -0
sky/dashboard/out/workspaces/[name].html
CHANGED

@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/4614e06482d7309e.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/4614e06482d7309e.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-
+<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/4614e06482d7309e.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/4614e06482d7309e.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-919e3c01ab6b2633.js" defer=""></script><script src="/dashboard/_next/static/chunks/framework-cf60a09ccd051a10.js" defer=""></script><script src="/dashboard/_next/static/chunks/main-f15ccb73239a3bf1.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_app-ce361c6959bc2001.js" defer=""></script><script src="/dashboard/_next/static/chunks/616-3d59f75e2ccf9321.js" defer=""></script><script src="/dashboard/_next/static/chunks/6130-2be46d70a38f1e82.js" defer=""></script><script src="/dashboard/_next/static/chunks/5739-d67458fcb1386c92.js" defer=""></script><script src="/dashboard/_next/static/chunks/7411-b15471acd2cba716.js" defer=""></script><script src="/dashboard/_next/static/chunks/1272-1ef0bf0237faccdb.js" defer=""></script><script src="/dashboard/_next/static/chunks/7359-c8d04e06886000b3.js" defer=""></script><script src="/dashboard/_next/static/chunks/6989-01359c57e018caa4.js" defer=""></script><script src="/dashboard/_next/static/chunks/3850-ff4a9a69d978632b.js" defer=""></script><script src="/dashboard/_next/static/chunks/8969-0389e2cb52412db3.js" defer=""></script><script src="/dashboard/_next/static/chunks/6990-f6818c84ed8f1c86.js" defer=""></script><script src="/dashboard/_next/static/chunks/6135-4b4d5e824b7f9d3c.js" defer=""></script><script src="/dashboard/_next/static/chunks/4282-d2f3ef2fbf78e347.js" defer=""></script><script src="/dashboard/_next/static/chunks/6601-06114c982db410b6.js" defer=""></script><script src="/dashboard/_next/static/chunks/3015-d014dc5b9412fade.js" defer=""></script><script src="/dashboard/_next/static/chunks/1141-ec6f902ffb865853.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/workspaces/%5Bname%5D-9ad108cd67d16d96.js" defer=""></script><script src="/dashboard/_next/static/IgACOQPupLbX9z-RYVEDx/_buildManifest.js" defer=""></script><script src="/dashboard/_next/static/IgACOQPupLbX9z-RYVEDx/_ssgManifest.js" defer=""></script></head><body><div id="__next"></div><script id="__NEXT_DATA__" type="application/json">{"props":{"pageProps":{}},"page":"/workspaces/[name]","query":{},"buildId":"IgACOQPupLbX9z-RYVEDx","assetPrefix":"/dashboard","nextExport":true,"autoExport":true,"isFallback":false,"scriptLoader":[]}</script></body></html>
sky/dashboard/out/workspaces.html
CHANGED

@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/4614e06482d7309e.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/4614e06482d7309e.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-
+<!DOCTYPE html><html><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><meta name="next-head-count" content="2"/><link rel="preload" href="/dashboard/_next/static/css/4614e06482d7309e.css" as="style"/><link rel="stylesheet" href="/dashboard/_next/static/css/4614e06482d7309e.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/dashboard/_next/static/chunks/polyfills-78c92fac7aa8fdd8.js"></script><script src="/dashboard/_next/static/chunks/webpack-919e3c01ab6b2633.js" defer=""></script><script src="/dashboard/_next/static/chunks/framework-cf60a09ccd051a10.js" defer=""></script><script src="/dashboard/_next/static/chunks/main-f15ccb73239a3bf1.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/_app-ce361c6959bc2001.js" defer=""></script><script src="/dashboard/_next/static/chunks/pages/workspaces-6fc994fa1ee6c6bf.js" defer=""></script><script src="/dashboard/_next/static/IgACOQPupLbX9z-RYVEDx/_buildManifest.js" defer=""></script><script src="/dashboard/_next/static/IgACOQPupLbX9z-RYVEDx/_ssgManifest.js" defer=""></script></head><body><div id="__next"></div><script id="__NEXT_DATA__" type="application/json">{"props":{"pageProps":{}},"page":"/workspaces","query":{},"buildId":"IgACOQPupLbX9z-RYVEDx","assetPrefix":"/dashboard","nextExport":true,"autoExport":true,"isFallback":false,"scriptLoader":[]}</script></body></html>
sky/global_user_state.py
CHANGED

@@ -32,6 +32,7 @@ from sky import sky_logging
 from sky import skypilot_config
 from sky.metrics import utils as metrics_lib
 from sky.skylet import constants
+from sky.utils import annotations
 from sky.utils import common_utils
 from sky.utils import context_utils
 from sky.utils import registry

@@ -342,6 +343,10 @@ def initialize_and_get_db() -> sqlalchemy.engine.Engine:
 
     # return engine
     _SQLALCHEMY_ENGINE = engine
+    # Cache the result of _sqlite_supports_returning()
+    # ahead of time, as it won't change throughout
+    # the lifetime of the engine.
+    _sqlite_supports_returning()
     return _SQLALCHEMY_ENGINE
 
 

@@ -372,19 +377,51 @@ def _init_db(func):
     return wrapper
 
 
+@annotations.lru_cache(scope='global', maxsize=1)
+def _sqlite_supports_returning() -> bool:
+    """Check if SQLite (3.35.0+) and SQLAlchemy (2.0+) support RETURNING.
+
+    See https://sqlite.org/lang_returning.html and
+    https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#insert-update-delete-returning  # pylint: disable=line-too-long
+    """
+    sqlalchemy_version_parts = sqlalchemy.__version__.split('.')
+    assert len(sqlalchemy_version_parts) >= 1, \
+        f'Invalid SQLAlchemy version: {sqlalchemy.__version__}'
+    sqlalchemy_major = int(sqlalchemy_version_parts[0])
+    if sqlalchemy_major < 2:
+        return False
+
+    assert _SQLALCHEMY_ENGINE is not None
+    if (_SQLALCHEMY_ENGINE.dialect.name !=
+            db_utils.SQLAlchemyDialect.SQLITE.value):
+        return False
+    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+        result = session.execute(sqlalchemy.text('SELECT sqlite_version()'))
+        version_str = result.scalar()
+    version_parts = version_str.split('.')
+    assert len(version_parts) >= 2, \
+        f'Invalid version string: {version_str}'
+    major, minor = int(version_parts[0]), int(version_parts[1])
+    return (major > 3) or (major == 3 and minor >= 35)
+
+
 @_init_db
 @metrics_lib.time_me
-def add_or_update_user(
-
+def add_or_update_user(
+    user: models.User,
+    allow_duplicate_name: bool = True,
+    return_user: bool = False
+) -> typing.Union[bool, typing.Tuple[bool, models.User]]:
     """Store the mapping from user hash to user name for display purposes.
 
     Returns:
-
+        If return_user=False: bool (whether the user is newly added)
+        If return_user=True: Tuple[bool, models.User]
     """
     assert _SQLALCHEMY_ENGINE is not None
 
     if user.name is None:
-        return False
+        return (False, user) if return_user else False
 
     # Set created_at if not already set
     created_at = user.created_at

@@ -396,7 +433,7 @@ def add_or_update_user(user: models.User,
         existing_user = session.query(user_table).filter(
             user_table.c.name == user.name).first()
         if existing_user is not None:
-            return False
+            return (False, user) if return_user else False
 
         if (_SQLALCHEMY_ENGINE.dialect.name ==
                 db_utils.SQLAlchemyDialect.SQLITE.value):

@@ -410,24 +447,57 @@ def add_or_update_user(user: models.User,
                 name=user.name,
                 password=user.password,
                 created_at=created_at)
+            use_returning = return_user and _sqlite_supports_returning()
+            if use_returning:
+                insert_stmnt = insert_stmnt.returning(
+                    user_table.c.id,
+                    user_table.c.name,
+                    user_table.c.password,
+                    user_table.c.created_at,
+                )
             result = session.execute(insert_stmnt)
 
-
-
+            row = None
+            if use_returning:
+                # With RETURNING, check if we got a row back.
+                row = result.fetchone()
+                was_inserted = row is not None
+            else:
+                # Without RETURNING, use rowcount.
+                was_inserted = result.rowcount > 0
 
             if not was_inserted:
                 # User existed, so update it (but don't update created_at)
+                update_values = {user_table.c.name: user.name}
                 if user.password:
-
-
-
-
-
-
-
+                    update_values[user_table.c.password] = user.password
+
+                update_stmnt = sqlalchemy.update(user_table).where(
+                    user_table.c.id == user.id).values(update_values)
+                if use_returning:
+                    update_stmnt = update_stmnt.returning(
+                        user_table.c.id, user_table.c.name,
+                        user_table.c.password, user_table.c.created_at)
+
+                result = session.execute(update_stmnt)
+                if use_returning:
+                    row = result.fetchone()
 
             session.commit()
-
+
+            if return_user:
+                if row is None:
+                    # row=None means the sqlite used has no RETURNING support,
+                    # so we need to do a separate query
+                    row = session.query(user_table).filter_by(
+                        id=user.id).first()
+                updated_user = models.User(id=row.id,
+                                           name=row.name,
+                                           password=row.password,
+                                           created_at=row.created_at)
+                return was_inserted, updated_user
+            else:
+                return was_inserted
 
         elif (_SQLALCHEMY_ENGINE.dialect.name ==
               db_utils.SQLAlchemyDialect.POSTGRESQL.value):

@@ -452,6 +522,9 @@ def add_or_update_user(user: models.User,
            upsert_stmnt = insert_stmnt.on_conflict_do_update(
                index_elements=[user_table.c.id], set_=set_).returning(
                    user_table.c.id,
+                   user_table.c.name,
+                   user_table.c.password,
+                   user_table.c.created_at,
                    # This will be True for INSERT, False for UPDATE
                    sqlalchemy.literal_column('(xmax = 0)').label('was_inserted'
                                                                  ))

@@ -459,10 +532,17 @@ def add_or_update_user(user: models.User,
            result = session.execute(upsert_stmnt)
            row = result.fetchone()
 
-
+           was_inserted = bool(row.was_inserted) if row else False
            session.commit()
 
-
+           if return_user:
+               updated_user = models.User(id=row.id,
+                                          name=row.name,
+                                          password=row.password,
+                                          created_at=row.created_at)
+               return was_inserted, updated_user
+           else:
+               return was_inserted
        else:
            raise ValueError('Unsupported database dialect')
 

@@ -1081,6 +1161,26 @@ def get_handles_from_cluster_names(
     }
 
 
+@_init_db
+@metrics_lib.time_me
+def get_cluster_name_to_handle_map(
+    is_managed: Optional[bool] = None,
+) -> Dict[str, Optional['backends.ResourceHandle']]:
+    assert _SQLALCHEMY_ENGINE is not None
+    with orm.Session(_SQLALCHEMY_ENGINE) as session:
+        query = session.query(cluster_table.c.name, cluster_table.c.handle)
+        if is_managed is not None:
+            query = query.filter(cluster_table.c.is_managed == int(is_managed))
+        rows = query.all()
+        name_to_handle = {}
+        for row in rows:
+            if row.handle and len(row.handle) > 0:
+                name_to_handle[row.name] = pickle.loads(row.handle)
+            else:
+                name_to_handle[row.name] = None
+        return name_to_handle
+
+
 @_init_db_async
 @metrics_lib.time_me
 async def get_status_from_cluster_name_async(
sky/jobs/constants.py
CHANGED

@@ -46,7 +46,7 @@ JOBS_CLUSTER_NAME_PREFIX_LENGTH = 25
 # The version of the lib files that jobs/utils use. Whenever there is an API
 # change for the jobs/utils, we need to bump this version and update
 # job.utils.ManagedJobCodeGen to handle the version update.
-MANAGED_JOBS_VERSION =
+MANAGED_JOBS_VERSION = 12
 
 # The command for setting up the jobs dashboard on the controller. It firstly
 # checks if the systemd services are available, and if not (e.g., Kubernetes
sky/jobs/server/core.py
CHANGED

@@ -663,12 +663,13 @@ def queue_v2_api(
     page: Optional[int] = None,
     limit: Optional[int] = None,
     statuses: Optional[List[str]] = None,
+    fields: Optional[List[str]] = None,
 ) -> Tuple[List[responses.ManagedJobRecord], int, Dict[str, int], int]:
     """Gets statuses of managed jobs and parse the
     jobs to responses.ManagedJobRecord."""
     jobs, total, status_counts, total_no_filter = queue_v2(
         refresh, skip_finished, all_users, job_ids, user_match, workspace_match,
-        name_match, pool_match, page, limit, statuses)
+        name_match, pool_match, page, limit, statuses, fields)
     return [responses.ManagedJobRecord(**job) for job in jobs
            ], total, status_counts, total_no_filter
 

@@ -686,6 +687,7 @@ def queue_v2(
     page: Optional[int] = None,
     limit: Optional[int] = None,
     statuses: Optional[List[str]] = None,
+    fields: Optional[List[str]] = None,
 ) -> Tuple[List[Dict[str, Any]], int, Dict[str, int], int]:
     # NOTE(dev): Keep the docstring consistent between the Python API and CLI.
     """Gets statuses of managed jobs with filtering.

@@ -790,7 +792,7 @@ def queue_v2(
     with metrics_lib.time_it('jobs.queue.generate_code', group='jobs'):
         code = managed_job_utils.ManagedJobCodeGen.get_job_table(
             skip_finished, accessible_workspaces, job_ids, workspace_match,
-            name_match, pool_match, page, limit, user_hashes, statuses)
+            name_match, pool_match, page, limit, user_hashes, statuses, fields)
     with metrics_lib.time_it('jobs.queue.run_on_head', group='jobs'):
         returncode, job_table_payload, stderr = backend.run_on_head(
             handle,
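
The fields argument added in these hunks is simply threaded from queue_v2_api through queue_v2 into ManagedJobCodeGen.get_job_table, so callers can request a narrower set of job columns. The snippet below only illustrates that kind of field filtering; the helper name and field names are assumptions for the example, not the actual controller code:

from typing import Any, Dict, List, Optional


def filter_job_fields(jobs: List[Dict[str, Any]],
                      fields: Optional[List[str]] = None) -> List[Dict[str, Any]]:
    # With no fields requested, behave as before and return full records.
    if not fields:
        return jobs
    # Otherwise keep only the requested keys that exist on each record.
    return [{key: job[key] for key in fields if key in job} for job in jobs]


print(filter_job_fields(
    [{'job_id': 1, 'status': 'RUNNING', 'name': 'train'}],
    fields=['job_id', 'status']))  # assumed field names, for illustration only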
sky/jobs/server/server.py
CHANGED

@@ -35,7 +35,7 @@ async def launch(request: fastapi.Request,
     consolidation_mode = managed_jobs_utils.is_consolidation_mode()
     schedule_type = (api_requests.ScheduleType.SHORT
                      if consolidation_mode else api_requests.ScheduleType.LONG)
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.launch',
         request_body=jobs_launch_body,

@@ -50,7 +50,7 @@ async def launch(request: fastapi.Request,
 @router.post('/queue')
 async def queue(request: fastapi.Request,
                 jobs_queue_body: payloads.JobsQueueBody) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.queue',
         request_body=jobs_queue_body,

@@ -64,7 +64,7 @@ async def queue(request: fastapi.Request,
 @router.post('/queue/v2')
 async def queue_v2(request: fastapi.Request,
                    jobs_queue_body_v2: payloads.JobsQueueV2Body) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.queue_v2',
         request_body=jobs_queue_body_v2,

@@ -79,7 +79,7 @@ async def queue_v2(request: fastapi.Request,
 @router.post('/cancel')
 async def cancel(request: fastapi.Request,
                  jobs_cancel_body: payloads.JobsCancelBody) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.cancel',
         request_body=jobs_cancel_body,

@@ -101,7 +101,7 @@ async def logs(
     schedule_type = api_requests.ScheduleType.LONG
     if schedule_type == api_requests.ScheduleType.SHORT:
         executor.check_request_thread_executor_available()
-    request_task = executor.
+    request_task = await executor.prepare_request_async(
         request_id=request.state.request_id,
         request_name='jobs.logs',
         request_body=jobs_logs_body,

@@ -141,7 +141,7 @@ async def download_logs(
     # We should reuse the original request body, so that the env vars, such as
     # user hash, are kept the same.
     jobs_download_logs_body.local_dir = str(logs_dir_on_api_server)
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.download_logs',
         request_body=jobs_download_logs_body,

@@ -155,7 +155,7 @@ async def download_logs(
 @router.post('/pool_apply')
 async def pool_apply(request: fastapi.Request,
                      jobs_pool_apply_body: payloads.JobsPoolApplyBody) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.pool_apply',
         request_body=jobs_pool_apply_body,

@@ -168,7 +168,7 @@ async def pool_apply(request: fastapi.Request,
 @router.post('/pool_down')
 async def pool_down(request: fastapi.Request,
                     jobs_pool_down_body: payloads.JobsPoolDownBody) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.pool_down',
         request_body=jobs_pool_down_body,

@@ -182,7 +182,7 @@ async def pool_down(request: fastapi.Request,
 async def pool_status(
         request: fastapi.Request,
         jobs_pool_status_body: payloads.JobsPoolStatusBody) -> None:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.pool_status',
         request_body=jobs_pool_status_body,

@@ -197,7 +197,7 @@ async def pool_tail_logs(
         request: fastapi.Request, log_body: payloads.JobsPoolLogsBody,
         background_tasks: fastapi.BackgroundTasks
 ) -> fastapi.responses.StreamingResponse:
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.pool_logs',
         request_body=log_body,

@@ -233,7 +233,7 @@ async def pool_download_logs(
     # We should reuse the original request body, so that the env vars, such as
     # user hash, are kept the same.
     download_logs_body.local_dir = str(logs_dir_on_api_server)
-    executor.
+    await executor.schedule_request_async(
         request_id=request.state.request_id,
         request_name='jobs.pool_sync_down_logs',
         request_body=download_logs_body,
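
Every hunk in this file follows the same pattern: the scheduling call becomes await executor.schedule_request_async(...) (or prepare_request_async for the short-scheduled log path), so the FastAPI handlers no longer block the event loop while a request is queued. A rough sketch of that handler shape, using stand-in names rather than SkyPilot's actual executor module:

import asyncio

import fastapi

app = fastapi.FastAPI()


async def schedule_request_async(request_id: str, request_name: str,
                                 request_body: dict) -> None:
    # Stand-in for handing the request off to a worker queue without blocking.
    await asyncio.sleep(0)
    print(f'scheduled {request_name} ({request_id})')


@app.post('/queue')
async def queue(request: fastapi.Request) -> None:
    body = await request.json()
    await schedule_request_async(
        request_id=request.headers.get('x-request-id', 'unknown'),
        request_name='jobs.queue',
        request_body=body)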
|