fractal-server 2.14.0a13__py3-none-any.whl → 2.14.0a15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/linkusergroup.py +6 -2
- fractal_server/app/models/v2/dataset.py +1 -1
- fractal_server/app/models/v2/job.py +7 -3
- fractal_server/app/models/v2/task_group.py +2 -2
- fractal_server/app/models/v2/workflow.py +1 -1
- fractal_server/app/models/v2/workflowtask.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +0 -17
- fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -8
- fractal_server/app/routes/api/v2/history.py +111 -27
- fractal_server/app/routes/api/v2/images.py +16 -14
- fractal_server/app/routes/api/v2/project.py +0 -52
- fractal_server/app/routes/api/v2/task_group.py +0 -17
- fractal_server/app/routes/api/v2/workflow.py +0 -8
- fractal_server/app/routes/auth/group.py +0 -16
- fractal_server/app/runner/executors/base_runner.py +5 -0
- fractal_server/app/runner/executors/local/runner.py +15 -7
- fractal_server/app/runner/executors/slurm_common/_handle_exception_proxy.py +17 -0
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +677 -0
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +102 -0
- fractal_server/app/runner/executors/slurm_ssh/runner.py +110 -648
- fractal_server/app/runner/executors/slurm_sudo/runner.py +32 -661
- fractal_server/app/runner/task_files.py +20 -6
- fractal_server/app/runner/v2/_slurm_ssh.py +6 -6
- fractal_server/app/runner/v2/_slurm_sudo.py +4 -4
- fractal_server/app/runner/v2/runner.py +4 -0
- fractal_server/app/runner/v2/runner_functions.py +2 -2
- fractal_server/app/runner/v2/submit_workflow.py +7 -16
- fractal_server/app/schemas/v2/__init__.py +3 -1
- fractal_server/app/schemas/v2/history.py +27 -2
- fractal_server/config.py +6 -2
- fractal_server/images/tools.py +23 -0
- fractal_server/migrations/versions/5b6007027595_on_cascade.py +250 -0
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +2 -2
- fractal_server/tasks/v2/utils_background.py +0 -19
- {fractal_server-2.14.0a13.dist-info → fractal_server-2.14.0a15.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a13.dist-info → fractal_server-2.14.0a15.dist-info}/RECORD +41 -42
- fractal_server/app/runner/executors/slurm_common/_check_jobs_status.py +0 -77
- fractal_server/app/runner/executors/slurm_ssh/_check_job_status_ssh.py +0 -67
- fractal_server/app/runner/executors/slurm_ssh/_executor_wait_thread.py +0 -126
- fractal_server/app/runner/executors/slurm_ssh/_slurm_job.py +0 -116
- fractal_server/app/runner/executors/slurm_ssh/executor.py +0 -1386
- {fractal_server-2.14.0a13.dist-info → fractal_server-2.14.0a15.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a13.dist-info → fractal_server-2.14.0a15.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a13.dist-info → fractal_server-2.14.0a15.dist-info}/entry_points.txt +0 -0
@@ -1,24 +1,24 @@
|
|
1
|
-
fractal_server/__init__.py,sha256=
|
1
|
+
fractal_server/__init__.py,sha256=RTgktqJdR_QVcUPwuSBCYyBhAT7xsBxkixaJXZsTluc,26
|
2
2
|
fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
|
3
3
|
fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
|
4
4
|
fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
5
5
|
fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHXTGlI,2889
|
6
6
|
fractal_server/app/history/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
7
7
|
fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
|
8
|
-
fractal_server/app/models/linkusergroup.py,sha256=
|
8
|
+
fractal_server/app/models/linkusergroup.py,sha256=3KkkE4QIUAlTrBAZs_tVy0pGvAxUAq6yOEjflct_z2M,678
|
9
9
|
fractal_server/app/models/linkuserproject.py,sha256=hvaxh3Lkiy2uUCwB8gvn8RorCpvxSSdzWdCS_U1GL7g,315
|
10
10
|
fractal_server/app/models/security.py,sha256=mMb_HiwWY74QZrs9xuyno0CVSmk4GYQWk5FxGixr8SU,3860
|
11
11
|
fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZDWOgLpU6Zs6iqU,1316
|
12
12
|
fractal_server/app/models/v2/__init__.py,sha256=vjHwek7-IXmaZZL9VF0nD30YL9ca4wNc8P4RXJK_kDc,832
|
13
13
|
fractal_server/app/models/v2/accounting.py,sha256=f2ALxfKKBNxFLJTtC2-YqRepVK253x68y7zkD2V_Nls,1115
|
14
|
-
fractal_server/app/models/v2/dataset.py,sha256=
|
14
|
+
fractal_server/app/models/v2/dataset.py,sha256=Xa3YLmqvSChBJoqlSsjmt-5x0zC-6rSx2eafFnMukfo,1240
|
15
15
|
fractal_server/app/models/v2/history.py,sha256=C0pqn_S5yqc8PjayoyRNcsk5Mt_SxvHitdQznuxJvGM,2044
|
16
|
-
fractal_server/app/models/v2/job.py,sha256=
|
16
|
+
fractal_server/app/models/v2/job.py,sha256=JWrEjX_E4iRFr5MbmtV_aY28J-5D469awLr0rfa5Kig,2052
|
17
17
|
fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
|
18
18
|
fractal_server/app/models/v2/task.py,sha256=8KEROaadgccXRZIP7EriBp2j1FgzYkgiirOi5_fG79M,1494
|
19
|
-
fractal_server/app/models/v2/task_group.py,sha256=
|
20
|
-
fractal_server/app/models/v2/workflow.py,sha256=
|
21
|
-
fractal_server/app/models/v2/workflowtask.py,sha256=
|
19
|
+
fractal_server/app/models/v2/task_group.py,sha256=yIzKAyJIFYYhG_K3AO-WGWYftygrk5D8H_WvAB7QnQk,3541
|
20
|
+
fractal_server/app/models/v2/workflow.py,sha256=tbZZ5IqsKQWiYdURjAS7n2oCpKK_g1ARQN2BOecry3k,1089
|
21
|
+
fractal_server/app/models/v2/workflowtask.py,sha256=zudfD3f4eUXZLq5NRZnIAlWCuAFk6w2E_G-uOcZkMco,1246
|
22
22
|
fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
23
23
|
fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
24
24
|
fractal_server/app/routes/admin/v2/__init__.py,sha256=_5lqb6-M8-fZqE1HRMep6pAFYRUKMxrvbZOKs-RXWkw,933
|
@@ -27,34 +27,34 @@ fractal_server/app/routes/admin/v2/impersonate.py,sha256=gc4lshfEPFR6W2asH7aKu6h
|
|
27
27
|
fractal_server/app/routes/admin/v2/job.py,sha256=4soc-5d99QEsir7U9AqpofgaGggSBwgMm7mXW5LBvSI,7439
|
28
28
|
fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
|
29
29
|
fractal_server/app/routes/admin/v2/task.py,sha256=QOwgyDU9m7T_wLMwkdgfFaoMjNxcDg6zMVpngxhUvqk,4374
|
30
|
-
fractal_server/app/routes/admin/v2/task_group.py,sha256=
|
30
|
+
fractal_server/app/routes/admin/v2/task_group.py,sha256=LG41hAsllBL6kc-JLxRNG_IrI6frIKrIF3xD0GeeTiI,7173
|
31
31
|
fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
|
32
32
|
fractal_server/app/routes/api/__init__.py,sha256=B8l6PSAhR10iZqHEiyTat-_0tkeKdrCigIE6DJGP5b8,638
|
33
33
|
fractal_server/app/routes/api/v2/__init__.py,sha256=9o9zxTU2IJrC_JQ8GUMft3niiBZ39YLvODUeraiRRdQ,2465
|
34
34
|
fractal_server/app/routes/api/v2/_aux_functions.py,sha256=eE-TdEMI_UX3LBDUGwjG5NyUcihDVaHYlG15NlTJ9DI,12872
|
35
|
-
fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=
|
35
|
+
fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=ZlI6nwzB5r9AiY0C8TzJS_EQOTPKgkRYl3GpxFAu2bg,4430
|
36
36
|
fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
|
37
37
|
fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
|
38
|
-
fractal_server/app/routes/api/v2/dataset.py,sha256=
|
39
|
-
fractal_server/app/routes/api/v2/history.py,sha256=
|
40
|
-
fractal_server/app/routes/api/v2/images.py,sha256=
|
38
|
+
fractal_server/app/routes/api/v2/dataset.py,sha256=h5AhE0sdhQ20ZlIbEJsFnHIOUW0S1VHFpoflpBkVScs,8936
|
39
|
+
fractal_server/app/routes/api/v2/history.py,sha256=BuXVCDUCIYG5YF49D4b_CsqQrx6uFH6WB4TgkrdS2zo,15926
|
40
|
+
fractal_server/app/routes/api/v2/images.py,sha256=BGpO94gVd8BTpCN6Mun2RXmjrPmfkIp73m8RN7uiGW4,8361
|
41
41
|
fractal_server/app/routes/api/v2/job.py,sha256=MU1sHIKk_89WrD0TD44d4ufzqnywot7On_W71KjyUbQ,6500
|
42
|
-
fractal_server/app/routes/api/v2/project.py,sha256=
|
42
|
+
fractal_server/app/routes/api/v2/project.py,sha256=uAZgATiHcOvbnRX-vv1D3HoaEUvLUd7vzVmGcqOP8ZY,4602
|
43
43
|
fractal_server/app/routes/api/v2/status_legacy.py,sha256=Q5ZWQNfeZKL8Xgtou2Xr80iaF1uO-r4oSKgq5H42V_8,6349
|
44
44
|
fractal_server/app/routes/api/v2/submit.py,sha256=hCwwC6bXP7EyhgGyVLv1ClybRH1YytDVoPunOzpsf0s,8822
|
45
45
|
fractal_server/app/routes/api/v2/task.py,sha256=O7pquZhXIS4lRs5XqHvstiwe8BiCuS-B3ZKJI1g6EJU,6985
|
46
46
|
fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
|
47
47
|
fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
|
48
|
-
fractal_server/app/routes/api/v2/task_group.py,sha256=
|
48
|
+
fractal_server/app/routes/api/v2/task_group.py,sha256=62zcVTdheXM5V3WmFuqisIqgETjXmZaRpNMcDX5bXS0,7408
|
49
49
|
fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
|
50
50
|
fractal_server/app/routes/api/v2/verify_image_types.py,sha256=IOB96X3_FYBd9L_QiyVSEoV13ZP7YGS4WlBIDA1Op4I,1979
|
51
|
-
fractal_server/app/routes/api/v2/workflow.py,sha256=
|
51
|
+
fractal_server/app/routes/api/v2/workflow.py,sha256=sW6Nm7dfzUY354hawyEkpQHy7rUvV2FCV8DPorH-TDU,10270
|
52
52
|
fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
|
53
53
|
fractal_server/app/routes/api/v2/workflowtask.py,sha256=7_syX2EO7ibF6Xkm7HBPhsUYq6aYnKNeC5iSaafQhG4,11342
|
54
54
|
fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
|
55
55
|
fractal_server/app/routes/auth/_aux_auth.py,sha256=UZgauY0V6mSqjte_sYI1cBl2h8bcbLaeWzgpl1jdJlk,4883
|
56
56
|
fractal_server/app/routes/auth/current_user.py,sha256=FUegTahlxT3BdPVCQYir0-ogg2YAaZ1DYuLcE_5NC9Y,5906
|
57
|
-
fractal_server/app/routes/auth/group.py,sha256=
|
57
|
+
fractal_server/app/routes/auth/group.py,sha256=P6naOD11Jud2IbdXKpHVjrdRo3IKAqnyXspT8k49k_w,7893
|
58
58
|
fractal_server/app/routes/auth/login.py,sha256=tSu6OBLOieoBtMZB4JkBAdEgH2Y8KqPGSbwy7NIypIo,566
|
59
59
|
fractal_server/app/routes/auth/oauth.py,sha256=KCtJHSzemC4S8AfX9bLLdVhlF1nU4DOyox-sNQtcWew,1978
|
60
60
|
fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
|
@@ -70,44 +70,42 @@ fractal_server/app/runner/components.py,sha256=-Ii5l8d_V6f5DFOd-Zsr8VYmOsyqw0Hox
|
|
70
70
|
fractal_server/app/runner/compress_folder.py,sha256=HSc1tv7x2DBjBoXwugZlC79rm9GNBIWtQKK9yWn5ZBI,3991
|
71
71
|
fractal_server/app/runner/exceptions.py,sha256=JC5ufHyeA1hYD_rkZUscI30DD8D903ncag7Z3AArmUY,4215
|
72
72
|
fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
73
|
-
fractal_server/app/runner/executors/base_runner.py,sha256=
|
73
|
+
fractal_server/app/runner/executors/base_runner.py,sha256=s5aZLDPzC565FadaqFxrCLIlQzBn2D9iOpEjnBZROkk,5541
|
74
74
|
fractal_server/app/runner/executors/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
75
75
|
fractal_server/app/runner/executors/local/get_local_config.py,sha256=wbrIYuGOvABOStrE7jNrC4ULPhtBQ5Q7Y3aKm_icomg,3508
|
76
|
-
fractal_server/app/runner/executors/local/runner.py,sha256=
|
76
|
+
fractal_server/app/runner/executors/local/runner.py,sha256=o_s8DIwI0UlBQ8NozPLG9VCtvBkhlCkC7um7yAei2j4,7936
|
77
77
|
fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
78
78
|
fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
|
79
|
-
fractal_server/app/runner/executors/slurm_common/
|
79
|
+
fractal_server/app/runner/executors/slurm_common/_handle_exception_proxy.py,sha256=jU2N4vMafdcDPqVXwSApu4zxskCqhHmsXF3hBpOAAFA,577
|
80
80
|
fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
|
81
81
|
fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=fZaFUUXqDH0p3DndCFUpFqTqyD2tMVCuSYgYLAycpVw,15897
|
82
|
+
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=y9FDJtgqLT8BA6vaxdpM-49bzNMOeTdul4zZJkv288A,25832
|
82
83
|
fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=-fAX1DZMB5RZnyYanIJD72mWOJAPkh21jd4loDXKJw4,5994
|
83
84
|
fractal_server/app/runner/executors/slurm_common/remote.py,sha256=iXLu4d-bWzn7qmDaOjKFkcuaSHLjPESAMSLcg6c99fc,5852
|
85
|
+
fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py,sha256=YGgzTspkK9ItSMzwuYv_1tY7_1g89Qpeny5Auinxk1E,2708
|
84
86
|
fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
|
85
87
|
fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
86
|
-
fractal_server/app/runner/executors/slurm_ssh/
|
87
|
-
fractal_server/app/runner/executors/slurm_ssh/_executor_wait_thread.py,sha256=lnW8dNNPqqbpQvojVBQaNJm4wN3Qkw02RWBZ1w68Hyw,3755
|
88
|
-
fractal_server/app/runner/executors/slurm_ssh/_slurm_job.py,sha256=IL1C52dezEiincVX2yKryNiPHi4YOMURNLdQO_QPdGw,4406
|
89
|
-
fractal_server/app/runner/executors/slurm_ssh/executor.py,sha256=StEX6vN9jY79nTxqRDb5OEhkTVd3jYhT4X0_luZSqd4,53678
|
90
|
-
fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=LON7H3RGPYwqmDj0gEcPg7CtBGQzClU8o0fubsXk-Rw,24482
|
88
|
+
fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=n9DAn2Rn_4gUphJCJk4CaQH7WQP4nBNZxqF9dj0H5cw,5768
|
91
89
|
fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
92
90
|
fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=O1bNg1DiSDJmQE0RmOk2Ii47DagiXp5ryd0R6KxO2OM,3177
|
93
|
-
fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=
|
91
|
+
fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=1NoXMQH-JAzBHGmzlUdu6P1gLGJE5y17U3yLxJOyyHE,5045
|
94
92
|
fractal_server/app/runner/extract_archive.py,sha256=tLpjDrX47OjTNhhoWvm6iNukg8KoieWyTb7ZfvE9eWU,2483
|
95
93
|
fractal_server/app/runner/filenames.py,sha256=lPnxKHtdRizr6FqG3zOdjDPyWA7GoaJGTtiuJV0gA8E,70
|
96
94
|
fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
|
97
95
|
fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
|
98
96
|
fractal_server/app/runner/shutdown.py,sha256=9pfSKHDNdIcm0eY-opgRTi7y0HmvfPmYiu9JR6Idark,2082
|
99
|
-
fractal_server/app/runner/task_files.py,sha256=
|
97
|
+
fractal_server/app/runner/task_files.py,sha256=kL2ymvaCq_TIhBrSk4VqlRJcSkTkzv-cb8FYYiRmPfs,3075
|
100
98
|
fractal_server/app/runner/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
101
99
|
fractal_server/app/runner/v2/_local.py,sha256=DK8yagbvd6HHjcDVhUzTy0f7MURlTkQha-NM6OZKgJc,3044
|
102
|
-
fractal_server/app/runner/v2/_slurm_ssh.py,sha256=
|
103
|
-
fractal_server/app/runner/v2/_slurm_sudo.py,sha256=
|
100
|
+
fractal_server/app/runner/v2/_slurm_ssh.py,sha256=_bytOf8z9sdrhI03D6eqg-aQPnJ7V2-qnqpcHAYizns,3278
|
101
|
+
fractal_server/app/runner/v2/_slurm_sudo.py,sha256=DBCNxifXmMkpu71Wnk5u9-wKT7PV1WROQuY_4DYoZRI,2993
|
104
102
|
fractal_server/app/runner/v2/db_tools.py,sha256=jj5Jlm5xPXIxpxbTyVP9QJ_8wxkzdWFU9DzgLW-8fhs,2420
|
105
103
|
fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
|
106
104
|
fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
|
107
|
-
fractal_server/app/runner/v2/runner.py,sha256=
|
108
|
-
fractal_server/app/runner/v2/runner_functions.py,sha256=
|
105
|
+
fractal_server/app/runner/v2/runner.py,sha256=SsKEZAsB8sPV8W3khTkAqaGdDwoTm_lav-fx6DdCwyA,15294
|
106
|
+
fractal_server/app/runner/v2/runner_functions.py,sha256=GRx1oZzW1KCWq8WS57OY2XFI8cDqBk6Bv-CEeWqIaHQ,16467
|
109
107
|
fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=9t1CHN3EyfsGRWfG257YPY5WjQ6zuztsw_KZrpEAFPo,3703
|
110
|
-
fractal_server/app/runner/v2/submit_workflow.py,sha256=
|
108
|
+
fractal_server/app/runner/v2/submit_workflow.py,sha256=EDUyUuIPwZHb2zm7SCRRoFsGq2cN-b5OKw6CYkZ8kWk,13048
|
111
109
|
fractal_server/app/runner/v2/task_interface.py,sha256=IXdQTI8rXFgXv1Ez0js4CjKFf3QwO2GCHRTuwiFtiTQ,2891
|
112
110
|
fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
|
113
111
|
fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
|
@@ -116,11 +114,11 @@ fractal_server/app/schemas/_validators.py,sha256=ZzTlTTzRATzf9Snx4Xp67aDmG77GaM2
|
|
116
114
|
fractal_server/app/schemas/user.py,sha256=oCftAKeHdFFowpLyh1G-RiylR8cIO7fTn0PkT5xjs0E,2494
|
117
115
|
fractal_server/app/schemas/user_group.py,sha256=Uao1igRYflBu7Dg6Zs0kaFU3zBFJzIwDLrkFfaJk6z8,2176
|
118
116
|
fractal_server/app/schemas/user_settings.py,sha256=z7hx54yTrWfjo98oX_1lkeRh1UGrC1dSRH6yIOpnCsY,2772
|
119
|
-
fractal_server/app/schemas/v2/__init__.py,sha256=
|
117
|
+
fractal_server/app/schemas/v2/__init__.py,sha256=wXS4ZEzobWx5dh-XLjMZWpd-JMwWFPODUeUVoMQGRv4,3099
|
120
118
|
fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
|
121
119
|
fractal_server/app/schemas/v2/dataset.py,sha256=xNWdOW8hhL5Wx-iwyUPrZfWcC8fFuGDgdOHvZLbGVME,2782
|
122
120
|
fractal_server/app/schemas/v2/dumps.py,sha256=uc9itXekO5IFfR6UucpQ5BX9NZZ8erE4hRR6S6aXlOc,2284
|
123
|
-
fractal_server/app/schemas/v2/history.py,sha256=
|
121
|
+
fractal_server/app/schemas/v2/history.py,sha256=EhfTBYNVVWeWTj5QFrnuaD4zB73fThK1lyhxB2OBFoc,1659
|
124
122
|
fractal_server/app/schemas/v2/job.py,sha256=OXPB4oPiMVWYgZu0lGzM_LGACvhWBavsW7c3MmivdDM,4556
|
125
123
|
fractal_server/app/schemas/v2/manifest.py,sha256=8mmB0QwxEgAeGgwKD_fT-o-wFy7lb6HxNXbp17IJqNY,7281
|
126
124
|
fractal_server/app/schemas/v2/project.py,sha256=ulgCmUnX0w-0jrSjVYIT7sxeK95CSNGh2msXydhsgYI,885
|
@@ -133,13 +131,13 @@ fractal_server/app/schemas/v2/workflowtask.py,sha256=rVbmNihDAJL_Sckbt1hBK2JEcb-
|
|
133
131
|
fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
|
134
132
|
fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
|
135
133
|
fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
|
136
|
-
fractal_server/config.py,sha256=
|
134
|
+
fractal_server/config.py,sha256=RQmM9IGlT3K7jpWolwVPVe9KoDgFY3abk9IjEmmtvhs,28570
|
137
135
|
fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
|
138
136
|
fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
|
139
137
|
fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
|
140
138
|
fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
|
141
139
|
fractal_server/images/models.py,sha256=jdGKMPi8WlO9Kvns4grIOU5LjujnvwIGjMFMC0wNy08,3501
|
142
|
-
fractal_server/images/tools.py,sha256
|
140
|
+
fractal_server/images/tools.py,sha256=-zFDzRv6cbbRo21OrD0eZY5qWcoMX8dxgEnfyI3tOcg,4140
|
143
141
|
fractal_server/logger.py,sha256=5Z3rfsFwl8UysVljTOaaIvt8Pyp6CVH492ez3jE8WAw,5113
|
144
142
|
fractal_server/main.py,sha256=FD9KzTTsXTQnTW0z3Hu7y0Nj_oAkBeZEInKDXFd4hjE,4561
|
145
143
|
fractal_server/migrations/env.py,sha256=nfyBpMIOT3kny6t-b-tUjyRjZ4k906bb1_wCQ7me1BI,1353
|
@@ -154,6 +152,7 @@ fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_ar
|
|
154
152
|
fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
|
155
153
|
fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=5ROUgcoZOdjf8kMt6cxuvPhzHmV6xaCxvZEbhUEyZM4,3271
|
156
154
|
fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs9J40eyCWi3w0c_iIBVJjXNn4VdVnQaT3KxDg,8770
|
155
|
+
fractal_server/migrations/versions/5b6007027595_on_cascade.py,sha256=44EmzOkk5-FJwtAy4TQuj7EctKwz6ZERkQjh_ljdDJc,7926
|
157
156
|
fractal_server/migrations/versions/5bf02391cfef_v2.py,sha256=axhNkr_H6R4rRbY7oGYazNbFvPXeSyBDWFVbKNmiqs8,8433
|
158
157
|
fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
|
159
158
|
fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py,sha256=mbWuCkTpRAdGbRhW7lhXs_e5S6O37UAcCN6JfoY5H8A,1353
|
@@ -176,7 +175,7 @@ fractal_server/migrations/versions/e75cac726012_make_applyworkflow_start_timesta
|
|
176
175
|
fractal_server/migrations/versions/e81103413827_add_job_type_filters.py,sha256=t4ImlKNHx5JMgBL2sTpLWunv1gwY8OCFOKd3G338mdE,890
|
177
176
|
fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py,sha256=jilQW3QIqYQ4Q6hCnUiG7UtNMpA41ujqrB3tPFiPM1Q,1221
|
178
177
|
fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
|
179
|
-
fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py,sha256=
|
178
|
+
fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py,sha256=8Wh-t2-1J4pABxjfT2NYHQzpZkNbgNqHfUHwQ6yB1us,3950
|
180
179
|
fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
181
180
|
fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
|
182
181
|
fractal_server/ssh/_fabric.py,sha256=lNy4IX1I4We6VoWa4Bz4fUPuApLMSoejpyE6I3jDZeM,22869
|
@@ -201,7 +200,7 @@ fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V
|
|
201
200
|
fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLYO3KsAyRK325ZsFcF6U,1747
|
202
201
|
fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
|
203
202
|
fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
|
204
|
-
fractal_server/tasks/v2/utils_background.py,sha256=
|
203
|
+
fractal_server/tasks/v2/utils_background.py,sha256=FKJvtz1YuFLXV1TuoSeebWA0uDWsQIUxpFqWtKSJigc,3423
|
205
204
|
fractal_server/tasks/v2/utils_database.py,sha256=h4Pa-JxcVk7WA4_Pz8CxFT9sX3sA43p2rVRg7FVSgvY,967
|
206
205
|
fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
|
207
206
|
fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
|
@@ -209,8 +208,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
|
|
209
208
|
fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
|
210
209
|
fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
|
211
210
|
fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
|
212
|
-
fractal_server-2.14.
|
213
|
-
fractal_server-2.14.
|
214
|
-
fractal_server-2.14.
|
215
|
-
fractal_server-2.14.
|
216
|
-
fractal_server-2.14.
|
211
|
+
fractal_server-2.14.0a15.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
|
212
|
+
fractal_server-2.14.0a15.dist-info/METADATA,sha256=HRQC8Dsp32xLGZ_ThsguhcO2XvKgLuHwo0ZDUEGBLu8,4563
|
213
|
+
fractal_server-2.14.0a15.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
|
214
|
+
fractal_server-2.14.0a15.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
|
215
|
+
fractal_server-2.14.0a15.dist-info/RECORD,,
|
@@ -1,77 +0,0 @@
|
|
1
|
-
import subprocess # nosec
|
2
|
-
|
3
|
-
from fractal_server.app.runner.executors.slurm_common._job_states import (
|
4
|
-
STATES_FINISHED,
|
5
|
-
)
|
6
|
-
from fractal_server.logger import set_logger
|
7
|
-
|
8
|
-
|
9
|
-
logger = set_logger(__name__)
|
10
|
-
|
11
|
-
|
12
|
-
def run_squeue(job_ids: list[str]) -> subprocess.CompletedProcess:
|
13
|
-
res = subprocess.run( # nosec
|
14
|
-
[
|
15
|
-
"squeue",
|
16
|
-
"--noheader",
|
17
|
-
"--format='%i %T'",
|
18
|
-
"--jobs",
|
19
|
-
",".join([str(j) for j in job_ids]),
|
20
|
-
"--states=all",
|
21
|
-
],
|
22
|
-
capture_output=True,
|
23
|
-
encoding="utf-8",
|
24
|
-
check=False,
|
25
|
-
)
|
26
|
-
if res.returncode != 0:
|
27
|
-
logger.warning(
|
28
|
-
f"squeue command with {job_ids} failed with:"
|
29
|
-
f"\n{res.stderr=}\n{res.stdout=}"
|
30
|
-
)
|
31
|
-
|
32
|
-
return res
|
33
|
-
|
34
|
-
|
35
|
-
def are_all_jobs_on_squeue(job_ids: list[str]) -> bool:
|
36
|
-
pass
|
37
|
-
|
38
|
-
|
39
|
-
def get_finished_jobs(job_ids: list[str]) -> set[str]:
|
40
|
-
"""
|
41
|
-
Check which ones of the given Slurm jobs already finished
|
42
|
-
|
43
|
-
The function is based on the `_jobs_finished` function from
|
44
|
-
clusterfutures (version 0.5).
|
45
|
-
Original Copyright: 2022 Adrian Sampson
|
46
|
-
(released under the MIT licence)
|
47
|
-
"""
|
48
|
-
|
49
|
-
# If there is no Slurm job to check, return right away
|
50
|
-
if not job_ids:
|
51
|
-
return set()
|
52
|
-
id_to_state = dict()
|
53
|
-
|
54
|
-
res = run_squeue(job_ids)
|
55
|
-
if res.returncode == 0:
|
56
|
-
id_to_state = {
|
57
|
-
out.split()[0]: out.split()[1] for out in res.stdout.splitlines()
|
58
|
-
}
|
59
|
-
else:
|
60
|
-
id_to_state = dict()
|
61
|
-
for j in job_ids:
|
62
|
-
res = run_squeue([j])
|
63
|
-
if res.returncode != 0:
|
64
|
-
logger.info(f"Job {j} not found. Marked it as completed")
|
65
|
-
id_to_state.update({str(j): "COMPLETED"})
|
66
|
-
else:
|
67
|
-
id_to_state.update(
|
68
|
-
{res.stdout.split()[0]: res.stdout.split()[1]}
|
69
|
-
)
|
70
|
-
|
71
|
-
# Finished jobs only stay in squeue for a few mins (configurable). If
|
72
|
-
# a job ID isn't there, we'll assume it's finished.
|
73
|
-
return {
|
74
|
-
j
|
75
|
-
for j in job_ids
|
76
|
-
if id_to_state.get(j, "COMPLETED") in STATES_FINISHED
|
77
|
-
}
|
@@ -1,67 +0,0 @@
|
|
1
|
-
from fractal_server.app.runner.executors.slurm_common._job_states import (
|
2
|
-
STATES_FINISHED,
|
3
|
-
)
|
4
|
-
from fractal_server.logger import set_logger
|
5
|
-
from fractal_server.ssh._fabric import FractalSSH
|
6
|
-
|
7
|
-
logger = set_logger(__name__)
|
8
|
-
|
9
|
-
|
10
|
-
def run_squeue(
|
11
|
-
*,
|
12
|
-
job_ids: list[str],
|
13
|
-
fractal_ssh: FractalSSH,
|
14
|
-
) -> str:
|
15
|
-
job_id_single_str = ",".join([str(j) for j in job_ids])
|
16
|
-
cmd = (
|
17
|
-
f"squeue --noheader --format='%i %T' --jobs {job_id_single_str}"
|
18
|
-
" --states=all"
|
19
|
-
)
|
20
|
-
stdout = fractal_ssh.run_command(cmd)
|
21
|
-
return stdout
|
22
|
-
|
23
|
-
|
24
|
-
def get_finished_jobs_ssh(
|
25
|
-
*,
|
26
|
-
fractal_ssh: FractalSSH,
|
27
|
-
job_ids: list[str],
|
28
|
-
) -> set[str]:
|
29
|
-
"""
|
30
|
-
# FIXME: make uniform with non-ssh one
|
31
|
-
|
32
|
-
Check which ones of the given Slurm jobs already finished
|
33
|
-
|
34
|
-
The function is based on the `_jobs_finished` function from
|
35
|
-
clusterfutures (version 0.5).
|
36
|
-
Original Copyright: 2022 Adrian Sampson
|
37
|
-
(released under the MIT licence)
|
38
|
-
"""
|
39
|
-
|
40
|
-
# If there is no Slurm job to check, return right away
|
41
|
-
if not job_ids:
|
42
|
-
return set()
|
43
|
-
|
44
|
-
id_to_state = dict()
|
45
|
-
|
46
|
-
try:
|
47
|
-
stdout = run_squeue(job_ids=job_ids, fractal_ssh=fractal_ssh)
|
48
|
-
id_to_state = {
|
49
|
-
line.split()[0]: line.split()[1] for line in stdout.splitlines()
|
50
|
-
}
|
51
|
-
except Exception: # FIXME
|
52
|
-
id_to_state = dict()
|
53
|
-
for j in job_ids:
|
54
|
-
try:
|
55
|
-
stdout = run_squeue([j])
|
56
|
-
id_to_state.update({stdout.split()[0]: stdout.split()[1]})
|
57
|
-
except Exception:
|
58
|
-
logger.info(f"Job {j} not found. Marked it as completed")
|
59
|
-
id_to_state.update({str(j): "COMPLETED"})
|
60
|
-
|
61
|
-
# Finished jobs only stay in squeue for a few mins (configurable). If
|
62
|
-
# a job ID isn't there, we'll assume it's finished.
|
63
|
-
return {
|
64
|
-
j
|
65
|
-
for j in job_ids
|
66
|
-
if id_to_state.get(j, "COMPLETED") in STATES_FINISHED
|
67
|
-
}
|
@@ -1,126 +0,0 @@
|
|
1
|
-
import os
|
2
|
-
import threading
|
3
|
-
import time
|
4
|
-
import traceback
|
5
|
-
from itertools import count
|
6
|
-
|
7
|
-
from fractal_server.app.runner.exceptions import JobExecutionError
|
8
|
-
from fractal_server.logger import set_logger
|
9
|
-
|
10
|
-
logger = set_logger(__name__)
|
11
|
-
|
12
|
-
|
13
|
-
class FractalSlurmSSHWaitThread(threading.Thread):
|
14
|
-
"""
|
15
|
-
Thread that monitors a pool of SLURM jobs
|
16
|
-
|
17
|
-
This class is a custom re-implementation of the waiting thread class from:
|
18
|
-
|
19
|
-
> clusterfutures <https://github.com/sampsyo/clusterfutures>
|
20
|
-
> Original Copyright
|
21
|
-
> Copyright 2021 Adrian Sampson <asampson@cs.washington.edu>
|
22
|
-
> License: MIT
|
23
|
-
|
24
|
-
Attributes:
|
25
|
-
shutdown_file:
|
26
|
-
shutdown_callback:
|
27
|
-
slurm_poll_interval:
|
28
|
-
jobs_finished_callback:
|
29
|
-
active_job_ids:
|
30
|
-
shutdown:
|
31
|
-
lock:
|
32
|
-
"""
|
33
|
-
|
34
|
-
shutdown_file: str
|
35
|
-
shutdown_callback: callable
|
36
|
-
slurm_poll_interval = 30
|
37
|
-
jobs_finished_callback: callable
|
38
|
-
active_job_ids: list[str]
|
39
|
-
shutdown: bool
|
40
|
-
_lock: threading.Lock
|
41
|
-
|
42
|
-
def __init__(self, callback: callable, interval=1):
|
43
|
-
"""
|
44
|
-
Init method
|
45
|
-
|
46
|
-
This method is executed on the main thread.
|
47
|
-
"""
|
48
|
-
threading.Thread.__init__(self, daemon=True)
|
49
|
-
self.callback = callback
|
50
|
-
self.interval = interval
|
51
|
-
self._lock = threading.Lock()
|
52
|
-
self.shutdown = False
|
53
|
-
self.active_job_ids = []
|
54
|
-
|
55
|
-
def wait(self, *, job_id: str):
|
56
|
-
"""
|
57
|
-
Add a a new job to the set of jobs being waited for.
|
58
|
-
|
59
|
-
This method is executed on the main thread.
|
60
|
-
"""
|
61
|
-
if self.shutdown:
|
62
|
-
error_msg = "Cannot call `wait` method after executor shutdown."
|
63
|
-
logger.warning(error_msg)
|
64
|
-
raise JobExecutionError(info=error_msg)
|
65
|
-
with self._lock:
|
66
|
-
self.active_job_ids.append(job_id)
|
67
|
-
|
68
|
-
def check_shutdown(self):
|
69
|
-
"""
|
70
|
-
Check whether the shutdown file exists
|
71
|
-
|
72
|
-
This method is executed on the waiting thread.
|
73
|
-
"""
|
74
|
-
if os.path.exists(self.shutdown_file):
|
75
|
-
logger.info(
|
76
|
-
f"Detected executor-shutdown file {self.shutdown_file}"
|
77
|
-
)
|
78
|
-
self.shutdown = True
|
79
|
-
|
80
|
-
def check_jobs(self):
|
81
|
-
"""
|
82
|
-
Check whether some jobs are over, and call callback.
|
83
|
-
|
84
|
-
This method is executed on the waiting thread.
|
85
|
-
"""
|
86
|
-
try:
|
87
|
-
if self.active_job_ids == []:
|
88
|
-
return
|
89
|
-
finished_jobs = self.jobs_finished_callback(self.active_job_ids)
|
90
|
-
if finished_jobs == set(self.active_job_ids):
|
91
|
-
self.callback(self.active_job_ids)
|
92
|
-
self.active_job_ids = []
|
93
|
-
|
94
|
-
except Exception:
|
95
|
-
# If anything goes wrong, print an exception without re-raising
|
96
|
-
traceback.print_exc()
|
97
|
-
|
98
|
-
def run(self):
    """
    Run forever (until a shutdown takes place) and trigger callback

    This method is executed on the waiting thread.

    Note that `shutdown_callback` only takes care of cleaning up the
    FractalSlurmExecutor variables, and then the `return` here is enough
    to fully clean up the `FractalFileWaitThread` object.
    """

    # FIXME SSH: are those try/except below needed?

    # Poll SLURM only every `polls_per_check` wake-ups, so that shutdown
    # detection stays responsive while job polling remains infrequent.
    polls_per_check = max(self.slurm_poll_interval // self.interval, 1)
    iteration = 0
    while True:
        self.check_shutdown()
        if self.shutdown:
            try:
                self.shutdown_callback()
            except Exception:  # nosec
                pass
            return
        if iteration % polls_per_check == 0:
            with self._lock:
                try:
                    self.check_jobs()
                except Exception:  # nosec
                    pass
        time.sleep(self.interval)
        iteration += 1
|
@@ -1,116 +0,0 @@
|
|
1
|
-
import uuid
|
2
|
-
from pathlib import Path
|
3
|
-
from typing import Optional
|
4
|
-
|
5
|
-
from fractal_server.app.runner.executors.slurm_common._slurm_config import (
|
6
|
-
SlurmConfig,
|
7
|
-
)
|
8
|
-
|
9
|
-
|
10
|
-
class SlurmJob:
    """
    Collect information related to a FractalSlurmSSHExecutor job

    This includes three groups of attributes:

    1. Attributes related to the (possibly multi-task) SLURM job, e.g.
    submission-file path.
    2. Attributes related to single tasks, e.g. the paths of their input/output
    pickle files.
    3. SLURM configuration options, encoded in a SlurmConfig object.

    Note: A SlurmJob object is generally defined as a multi-task job. Jobs
    coming from the `map` method must have `single_task_submission=False` (even
    if `num_tasks_tot=1`), while jobs coming from `submit` must have it set to
    `True`.

    Attributes:
        num_tasks_tot:
            Total number of tasks to be executed as part of this SLURM job.
        single_task_submission:
            This must be `True` for jobs submitted as part of the `submit`
            method, and `False` for jobs coming from the `map` method.
        slurm_file_prefix:
            Prefix for SLURM-job related files (submission script and SLURM
            stdout/stderr); this is also needed in the
            `_copy_files_from_remote_to_local` method.
        wftask_file_prefixes:
            Prefix for files that are created as part of the functions
            submitted for execution on the `FractalSlurmSSHExecutor`; this is
            needed in the `_copy_files_from_remote_to_local` method, and also
            to construct the names of per-task input/output pickle files.
        wftask_subfolder_name:
            Name of the per-task subfolder (e.g. `7_task_name`).
        slurm_script:
            Path of SLURM submission script.
        slurm_stdout:
            Path of SLURM stdout file; if this includes `"%j"`, then this
            string will be replaced by the SLURM job ID upon `sbatch`
            submission.
        slurm_stderr:
            Path of SLURM stderr file; see `slurm_stdout` concerning `"%j"`.
        workerids:
            IDs that enter in the per-task input/output pickle files (one per
            task).
        input_pickle_files:
            Input pickle files (one per task).
        output_pickle_files:
            Output pickle files (one per task).
        slurm_config:
            `SlurmConfig` object.
    """

    # Job-related attributes
    num_tasks_tot: int
    single_task_submission: bool
    slurm_file_prefix: str
    slurm_script_local: Path
    slurm_script_remote: Path
    slurm_stdout_local: Path
    slurm_stdout_remote: Path
    slurm_stderr_local: Path
    slurm_stderr_remote: Path

    # Per-task attributes
    wftask_subfolder_name: str
    workerids: tuple[str, ...]
    wftask_file_prefixes: tuple[str, ...]
    input_pickle_files_local: tuple[Path, ...]
    input_pickle_files_remote: tuple[Path, ...]
    output_pickle_files_local: tuple[Path, ...]
    output_pickle_files_remote: tuple[Path, ...]

    # Slurm configuration (forward reference, so that the annotation is not
    # evaluated at class-creation time)
    slurm_config: "SlurmConfig"

    def __init__(
        self,
        num_tasks_tot: int,
        slurm_config: "SlurmConfig",
        slurm_file_prefix: Optional[str] = None,
        wftask_file_prefixes: Optional[tuple[str, ...]] = None,
        single_task_submission: bool = False,
    ):
        """
        Initialize a (possibly multi-task) SLURM-job description.

        Args:
            num_tasks_tot: Total number of tasks in this SLURM job.
            slurm_config: SLURM configuration options.
            slurm_file_prefix: Prefix for SLURM-job-related files.
            wftask_file_prefixes: Per-task file prefixes.
            single_task_submission: Must be `True` for `submit`-based jobs.

        Raises:
            ValueError: If `single_task_submission` is set while
                `num_tasks_tot > 1`.
        """
        if single_task_submission and num_tasks_tot > 1:
            # Note the trailing space below: without it, the two f-string
            # fragments were glued together in the error message.
            raise ValueError(
                "Trying to initialize SlurmJob with "
                f"{single_task_submission=} and {num_tasks_tot=}."
            )
        self.num_tasks_tot = num_tasks_tot
        self.single_task_submission = single_task_submission
        self.slurm_file_prefix = slurm_file_prefix or "default_slurm_prefix"
        if wftask_file_prefixes is None:
            self.wftask_file_prefixes = tuple(
                "default_wftask_prefix" for _ in range(self.num_tasks_tot)
            )
        else:
            self.wftask_file_prefixes = wftask_file_prefixes
        # Store worker IDs as strings, matching the `tuple[str, ...]`
        # annotation above (they were previously `uuid.UUID` objects).
        self.workerids = tuple(
            str(uuid.uuid4()) for _ in range(self.num_tasks_tot)
        )
        self.slurm_config = slurm_config

    def get_clean_output_pickle_files(self) -> tuple[str, ...]:
        """
        Transform all pathlib.Path objects in self.output_pickle_files to
        strings
        """
        # `Path.as_posix()` already returns `str`; no extra cast needed.
        return tuple(f.as_posix() for f in self.output_pickle_files_local)