fractal-server 2.14.12__py3-none-any.whl → 2.14.14__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (25)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/api/v2/_aux_task_group_disambiguation.py +163 -0
  3. fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -2
  4. fractal_server/app/routes/api/v2/task.py +5 -4
  5. fractal_server/app/routes/api/v2/task_group.py +52 -4
  6. fractal_server/app/routes/api/v2/task_version_update.py +18 -10
  7. fractal_server/app/routes/api/v2/workflow_import.py +3 -70
  8. fractal_server/app/routes/api/v2/workflowtask.py +6 -5
  9. fractal_server/app/runner/executors/base_runner.py +38 -17
  10. fractal_server/app/runner/executors/local/runner.py +14 -14
  11. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +12 -14
  12. fractal_server/app/runner/v2/runner.py +19 -8
  13. fractal_server/app/runner/v2/runner_functions.py +12 -8
  14. fractal_server/app/schemas/v2/__init__.py +1 -0
  15. fractal_server/app/schemas/v2/dumps.py +2 -2
  16. fractal_server/app/schemas/v2/manifest.py +2 -9
  17. fractal_server/app/schemas/v2/task.py +18 -14
  18. fractal_server/app/schemas/v2/workflowtask.py +2 -2
  19. fractal_server/exceptions.py +2 -0
  20. fractal_server/utils.py +0 -49
  21. {fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/METADATA +1 -1
  22. {fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/RECORD +25 -23
  23. {fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/LICENSE +0 -0
  24. {fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/WHEEL +0 -0
  25. {fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/executors/local/runner.py CHANGED
@@ -3,19 +3,21 @@ from concurrent.futures import Future
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import Any
-from typing import Literal

 from ..call_command_wrapper import call_command_wrapper
 from .get_local_config import LocalBackendConfig
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.runner.exceptions import TaskExecutionError
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.runner.executors.base_runner import MultisubmitTaskType
+from fractal_server.app.runner.executors.base_runner import SubmitTaskType
 from fractal_server.app.runner.task_files import TaskFiles
 from fractal_server.app.runner.v2.db_tools import (
     bulk_update_status_of_history_unit,
 )
 from fractal_server.app.runner.v2.db_tools import update_status_of_history_unit
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskType
 from fractal_server.logger import set_logger

 logger = set_logger(__name__)
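Note: the two aliases imported here are defined in `base_runner.py` (+38 -17 in the file list), whose hunks are not included in this excerpt. A plausible sketch of those definitions, inferred from the inline `Literal[...]` annotations they replace below; only the alias names and the `TaskType` members are confirmed by this diff:

```python
from typing import Literal

from fractal_server.app.schemas.v2 import TaskType

# Task types handled by `submit` (single-unit submission)...
SubmitTaskType = Literal[
    TaskType.NON_PARALLEL,
    TaskType.CONVERTER_NON_PARALLEL,
    TaskType.COMPOUND,
    TaskType.CONVERTER_COMPOUND,
]

# ...and by `multisubmit` (parallel tasks, plus the parallel phase
# of compound tasks).
MultisubmitTaskType = Literal[
    TaskType.PARALLEL,
    TaskType.COMPOUND,
    TaskType.CONVERTER_COMPOUND,
]
```

Defining the two subsets once keeps the local and SLURM runner signatures in sync instead of repeating the same `Literal[...]` block in each backend.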
@@ -87,12 +89,7 @@ class LocalRunner(BaseRunner):
         history_unit_id: int,
         task_files: TaskFiles,
         config: LocalBackendConfig,
-        task_type: Literal[
-            "non_parallel",
-            "converter_non_parallel",
-            "compound",
-            "converter_compound",
-        ],
+        task_type: SubmitTaskType,
         user_id: int,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
@@ -129,7 +126,10 @@ class LocalRunner(BaseRunner):
             try:
                 result = future.result()
                 logger.debug("[submit] END with result")
-                if task_type not in ["compound", "converter_compound"]:
+                if task_type not in [
+                    TaskType.COMPOUND,
+                    TaskType.CONVERTER_COMPOUND,
+                ]:
                     update_status_of_history_unit(
                         history_unit_id=history_unit_id,
                         status=HistoryUnitStatus.DONE,
@@ -154,7 +154,7 @@ class LocalRunner(BaseRunner):
         list_parameters: list[dict],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
-        task_type: Literal["parallel", "compound", "converter_compound"],
+        task_type: MultisubmitTaskType,
         config: LocalBackendConfig,
         user_id: int,
     ) -> tuple[dict[int, Any], dict[int, BaseException]]:
@@ -197,7 +197,7 @@ class LocalRunner(BaseRunner):
            exceptions = {
                ind: exception for ind in range(len(list_parameters))
            }
-            if task_type == "parallel":
+            if task_type == TaskType.PARALLEL:
                with next(get_sync_db()) as db:
                    bulk_update_status_of_history_unit(
                        history_unit_ids=history_unit_ids,
@@ -233,7 +233,7 @@ class LocalRunner(BaseRunner):
                        positional_index
                    ]
                    exceptions[positional_index] = TaskExecutionError(str(e))
-                    if task_type == "parallel":
+                    if task_type == TaskType.PARALLEL:
                        with next(get_sync_db()) as db:
                            update_status_of_history_unit(
                                history_unit_id=current_history_unit_id,
@@ -252,14 +252,14 @@ class LocalRunner(BaseRunner):
            with next(get_sync_db()) as db:
                for positional_index, fut in finished_futures:
                    active_futures.pop(positional_index)
-                    if task_type == "parallel":
+                    if task_type == TaskType.PARALLEL:
                        current_history_unit_id = history_unit_ids[
                            positional_index
                        ]

                    try:
                        results[positional_index] = fut.result()
-                        if task_type == "parallel":
+                        if task_type == TaskType.PARALLEL:
                            update_status_of_history_unit(
                                history_unit_id=current_history_unit_id,
                                status=HistoryUnitStatus.DONE,
@@ -275,7 +275,7 @@ class LocalRunner(BaseRunner):
                        exceptions[positional_index] = TaskExecutionError(
                            str(e)
                        )
-                        if task_type == "parallel":
+                        if task_type == TaskType.PARALLEL:
                            update_status_of_history_unit(
                                history_unit_id=current_history_unit_id,
                                status=HistoryUnitStatus.FAILED,
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py CHANGED
@@ -19,6 +19,8 @@ from fractal_server.app.models.v2 import AccountingRecordSlurm
 from fractal_server.app.runner.exceptions import JobExecutionError
 from fractal_server.app.runner.exceptions import TaskExecutionError
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.runner.executors.base_runner import MultisubmitTaskType
+from fractal_server.app.runner.executors.base_runner import SubmitTaskType
 from fractal_server.app.runner.filenames import SHUTDOWN_FILENAME
 from fractal_server.app.runner.task_files import TaskFiles
 from fractal_server.app.runner.v2.db_tools import (
@@ -26,6 +28,7 @@ from fractal_server.app.runner.v2.db_tools import (
 )
 from fractal_server.app.runner.v2.db_tools import update_status_of_history_unit
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskType
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
 from fractal_server.syringe import Inject
@@ -501,12 +504,7 @@ class BaseSlurmRunner(BaseRunner):
         history_unit_id: int,
         task_files: TaskFiles,
         config: SlurmConfig,
-        task_type: Literal[
-            "non_parallel",
-            "converter_non_parallel",
-            "compound",
-            "converter_compound",
-        ],
+        task_type: SubmitTaskType,
         user_id: int,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
@@ -604,8 +602,8 @@
                )
            else:
                if task_type not in [
-                    "compound",
-                    "converter_compound",
+                    TaskType.COMPOUND,
+                    TaskType.CONVERTER_COMPOUND,
                ]:
                    update_status_of_history_unit(
                        history_unit_id=history_unit_id,
@@ -641,7 +639,7 @@
         list_parameters: list[dict],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
-        task_type: Literal["parallel", "compound", "converter_compound"],
+        task_type: MultisubmitTaskType,
         config: SlurmConfig,
         user_id: int,
     ) -> tuple[dict[int, Any], dict[int, BaseException]]:
@@ -654,7 +652,7 @@
        logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
        try:
            if self.is_shutdown():
-                if task_type == "parallel":
+                if task_type == TaskType.PARALLEL:
                    with next(get_sync_db()) as db:
                        bulk_update_status_of_history_unit(
                            history_unit_ids=history_unit_ids,
@@ -680,7 +678,7 @@
            workdir_remote = list_task_files[0].wftask_subfolder_remote

            # Create local&remote task subfolders
-            if task_type == "parallel":
+            if task_type == TaskType.PARALLEL:
                self._mkdir_local_folder(workdir_local.as_posix())
                self._mkdir_remote_folder(folder=workdir_remote.as_posix())

@@ -758,7 +756,7 @@
                    f" Original error {str(e)}"
                )
                self.scancel_jobs()
-                if task_type == "parallel":
+                if task_type == TaskType.PARALLEL:
                    with next(get_sync_db()) as db:
                        bulk_update_status_of_history_unit(
                            history_unit_ids=history_unit_ids,
@@ -824,7 +822,7 @@
                    # `result is None` is not relevant for this purpose.
                    if exception is not None:
                        exceptions[task.index] = exception
-                        if task_type == "parallel":
+                        if task_type == TaskType.PARALLEL:
                            update_status_of_history_unit(
                                history_unit_id=history_unit_ids[
                                    task.index
@@ -834,7 +832,7 @@
                            )
                    else:
                        results[task.index] = result
-                        if task_type == "parallel":
+                        if task_type == TaskType.PARALLEL:
                            update_status_of_history_unit(
                                history_unit_id=history_unit_ids[
                                    task.index
fractal_server/app/runner/v2/runner.py CHANGED
@@ -10,10 +10,6 @@ from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import delete
 from sqlmodel import update

-from ....images import SingleImage
-from ....images.tools import filter_image_list
-from ....images.tools import find_image_by_zarr_url
-from ..exceptions import JobExecutionError
 from .merge_outputs import merge_outputs
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
@@ -28,13 +24,18 @@ from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.app.runner.exceptions import JobExecutionError
 from fractal_server.app.runner.executors.base_runner import BaseRunner
 from fractal_server.app.runner.v2.db_tools import update_status_of_history_run
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import TaskDumpV2
 from fractal_server.app.schemas.v2 import TaskGroupDumpV2
+from fractal_server.app.schemas.v2 import TaskType
+from fractal_server.images import SingleImage
 from fractal_server.images.status_tools import enrich_images_sync
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY
+from fractal_server.images.tools import filter_image_list
+from fractal_server.images.tools import find_image_by_zarr_url
 from fractal_server.images.tools import merge_type_filters
 from fractal_server.types import AttributeFilters

@@ -131,7 +132,11 @@ def execute_tasks_v2(
        # PRE TASK EXECUTION

        # Filter images by types and attributes (in two steps)
-        if wftask.task_type in ["compound", "parallel", "non_parallel"]:
+        if wftask.task_type in [
+            TaskType.COMPOUND,
+            TaskType.PARALLEL,
+            TaskType.NON_PARALLEL,
+        ]:
            # Non-converter task
            type_filters = copy(current_dataset_type_filters)
            type_filters_patch = merge_type_filters(
@@ -190,7 +195,10 @@

        # TASK EXECUTION (V2)
        try:
-            if task.type in ["non_parallel", "converter_non_parallel"]:
+            if task.type in [
+                TaskType.NON_PARALLEL,
+                TaskType.CONVERTER_NON_PARALLEL,
+            ]:
                outcomes_dict, num_tasks = run_v2_task_non_parallel(
                    images=filtered_images,
                    zarr_dir=zarr_dir,
@@ -205,7 +213,7 @@
                    user_id=user_id,
                    task_type=task.type,
                )
-            elif task.type == "parallel":
+            elif task.type == TaskType.PARALLEL:
                outcomes_dict, num_tasks = run_v2_task_parallel(
                    images=filtered_images,
                    wftask=wftask,
@@ -218,7 +226,10 @@
                    dataset_id=dataset.id,
                    user_id=user_id,
                )
-            elif task.type in ["compound", "converter_compound"]:
+            elif task.type in [
+                TaskType.COMPOUND,
+                TaskType.CONVERTER_COMPOUND,
+            ]:
                outcomes_dict, num_tasks = run_v2_task_compound(
                    images=filtered_images,
                    zarr_dir=zarr_dir,
fractal_server/app/runner/v2/runner_functions.py CHANGED
@@ -31,6 +31,7 @@ from fractal_server.app.runner.v2.task_interface import (
     _cast_and_validate_TaskOutput,
 )
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskType
 from fractal_server.logger import set_logger

 __all__ = [
@@ -135,14 +136,17 @@ def run_v2_task_non_parallel(
     ],
     dataset_id: int,
     history_run_id: int,
-    task_type: Literal["non_parallel", "converter_non_parallel"],
+    task_type: Literal[TaskType.NON_PARALLEL, TaskType.CONVERTER_NON_PARALLEL],
     user_id: int,
 ) -> tuple[dict[int, SubmissionOutcome], int]:
     """
     This runs server-side (see `executor` argument)
     """

-    if task_type not in ["non_parallel", "converter_non_parallel"]:
+    if task_type not in [
+        TaskType.NON_PARALLEL,
+        TaskType.CONVERTER_NON_PARALLEL,
+    ]:
         raise ValueError(
             f"Invalid {task_type=} for `run_v2_task_non_parallel`."
         )
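The `task_type` annotation above relies on `Literal` accepting enum members (PEP 586), which lets a signature narrow `TaskType` to a subset without a dedicated alias. A minimal, self-contained illustration (the `flavor` function is hypothetical):

```python
from enum import StrEnum
from typing import Literal


class TaskType(StrEnum):
    NON_PARALLEL = "non_parallel"
    CONVERTER_NON_PARALLEL = "converter_non_parallel"
    PARALLEL = "parallel"


def flavor(
    task_type: Literal[
        TaskType.NON_PARALLEL,
        TaskType.CONVERTER_NON_PARALLEL,
    ],
) -> str:
    # A type checker rejects e.g. `flavor(TaskType.PARALLEL)`; the runtime
    # check mirrors the defensive guard kept in the function above.
    if task_type not in [
        TaskType.NON_PARALLEL,
        TaskType.CONVERTER_NON_PARALLEL,
    ]:
        raise ValueError(f"Invalid {task_type=}")
    return "non-parallel"


print(flavor(TaskType.NON_PARALLEL))  # non-parallel
```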
@@ -166,14 +170,14 @@
         "zarr_dir": zarr_dir,
         **(wftask.args_non_parallel or {}),
     }
-    if task_type == "non_parallel":
+    if task_type == TaskType.NON_PARALLEL:
         function_kwargs["zarr_urls"] = [img["zarr_url"] for img in images]

     # Database History operations
     with next(get_sync_db()) as db:
-        if task_type == "non_parallel":
+        if task_type == TaskType.NON_PARALLEL:
             zarr_urls = function_kwargs["zarr_urls"]
-        elif task_type == "converter_non_parallel":
+        elif task_type == TaskType.CONVERTER_NON_PARALLEL:
             zarr_urls = []

         history_unit = HistoryUnit(
@@ -388,7 +392,7 @@ def run_v2_task_compound(
     ],
     dataset_id: int,
     history_run_id: int,
-    task_type: Literal["compound", "converter_compound"],
+    task_type: Literal[TaskType.COMPOUND, TaskType.CONVERTER_COMPOUND],
     user_id: int,
 ) -> tuple[dict[int, SubmissionOutcome], int]:
     # Get TaskFiles object
@@ -410,10 +414,10 @@
         "zarr_dir": zarr_dir,
         **(wftask.args_non_parallel or {}),
     }
-    if task_type == "compound":
+    if task_type == TaskType.COMPOUND:
         function_kwargs["zarr_urls"] = [img["zarr_url"] for img in images]
         input_image_zarr_urls = function_kwargs["zarr_urls"]
-    elif task_type == "converter_compound":
+    elif task_type == TaskType.CONVERTER_COMPOUND:
         input_image_zarr_urls = []

     # Create database History entries
fractal_server/app/schemas/v2/__init__.py CHANGED
@@ -31,6 +31,7 @@ from .task import TaskExportV2 # noqa F401
 from .task import TaskImportV2  # noqa F401
 from .task import TaskImportV2Legacy  # noqa F401
 from .task import TaskReadV2  # noqa F401
+from .task import TaskType  # noqa F401
 from .task import TaskUpdateV2  # noqa F401
 from .task_collection import TaskCollectCustomV2  # noqa F401
 from .task_collection import TaskCollectPipV2  # noqa F401
fractal_server/app/schemas/v2/dumps.py CHANGED
@@ -11,7 +11,7 @@ from pydantic import BaseModel
 from pydantic import ConfigDict
 from pydantic import Field

-from .task import TaskTypeType
+from .task import TaskType
 from .task_group import TaskGroupV2OriginEnum


@@ -25,7 +25,7 @@ class ProjectDumpV2(BaseModel):
 class TaskDumpV2(BaseModel):
     id: int
     name: str
-    type: TaskTypeType
+    type: TaskType

     command_non_parallel: str | None = None
     command_parallel: str | None = None
fractal_server/app/schemas/v2/manifest.py CHANGED
@@ -4,6 +4,7 @@ from pydantic import BaseModel
 from pydantic import Field
 from pydantic import model_validator

+from .task import TaskType
 from fractal_server.types import DictStrAny
 from fractal_server.types import HttpUrlStr
 from fractal_server.types import NonEmptyStr
@@ -55,15 +56,7 @@ class TaskManifestV2(BaseModel):
     modality: str | None = None
     tags: list[str] = Field(default_factory=list)

-    type: None | (
-        Literal[
-            "compound",
-            "converter_compound",
-            "non_parallel",
-            "converter_non_parallel",
-            "parallel",
-        ]
-    ) = None
+    type: None | TaskType = None

     @model_validator(mode="after")
     def validate_executable_args_meta(self):
fractal_server/app/schemas/v2/task.py CHANGED
@@ -1,5 +1,5 @@
+from enum import StrEnum
 from typing import Any
-from typing import Literal

 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -14,18 +14,22 @@ from fractal_server.types import ListUniqueNonEmptyString
 from fractal_server.types import NonEmptyStr
 from fractal_server.types import TypeFilters

-TaskTypeType = Literal[
-    "compound",
-    "converter_compound",
-    "non_parallel",
-    "converter_non_parallel",
-    "parallel",
-]
-

 logger = set_logger(__name__)


+class TaskType(StrEnum):
+    """
+    Define the available task types.
+    """
+
+    COMPOUND = "compound"
+    CONVERTER_COMPOUND = "converter_compound"
+    NON_PARALLEL = "non_parallel"
+    CONVERTER_NON_PARALLEL = "converter_non_parallel"
+    PARALLEL = "parallel"
+
+
 class TaskCreateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")
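Since `TaskType` is a `StrEnum` (Python 3.11+), each member is a genuine `str` that compares equal to its value. That is what makes the member-for-literal swaps throughout this diff behavior-preserving, including for task types that arrive as plain strings from the database or from JSON payloads. A quick demonstration:

```python
from enum import StrEnum


class TaskType(StrEnum):
    COMPOUND = "compound"
    PARALLEL = "parallel"


# Members compare equal to the old string literals...
assert TaskType.PARALLEL == "parallel"
# ...including the membership tests rewritten in this diff...
assert "compound" in [TaskType.COMPOUND, TaskType.PARALLEL]
# ...and they behave as plain strings when serialized or formatted.
assert isinstance(TaskType.COMPOUND, str)
assert f"{TaskType.PARALLEL}" == "parallel"
```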
 
@@ -51,7 +55,7 @@ class TaskCreateV2(BaseModel):
     tags: ListUniqueNonEmptyString = Field(default_factory=list)
     authors: NonEmptyStr | None = None

-    type: TaskTypeType | None = None
+    type: TaskType | None = None

     @model_validator(mode="after")
     def validate_commands(self):
@@ -78,11 +82,11 @@ class TaskCreateV2(BaseModel):
                 "Please move to `fractal-task-tools`."
             )
         if self.command_non_parallel is None:
-            self.type = "parallel"
+            self.type = TaskType.PARALLEL
         elif self.command_parallel is None:
-            self.type = "non_parallel"
+            self.type = TaskType.NON_PARALLEL
         else:
-            self.type = "compound"
+            self.type = TaskType.COMPOUND

         return self

@@ -90,7 +94,7 @@ class TaskCreateV2(BaseModel):
 class TaskReadV2(BaseModel):
     id: int
     name: str
-    type: TaskTypeType
+    type: TaskType
     source: str | None = None
     version: str | None = None

fractal_server/app/schemas/v2/workflowtask.py CHANGED
@@ -9,7 +9,7 @@ from .task import TaskExportV2
 from .task import TaskImportV2
 from .task import TaskImportV2Legacy
 from .task import TaskReadV2
-from .task import TaskTypeType
+from .task import TaskType
 from fractal_server.types import DictStrAny
 from fractal_server.types import TypeFilters
 from fractal_server.types import WorkflowTaskArgument
@@ -45,7 +45,7 @@ class WorkflowTaskReadV2(BaseModel):

     type_filters: dict[str, bool]

-    task_type: TaskTypeType
+    task_type: TaskType
     task_id: int
     task: TaskReadV2

fractal_server/exceptions.py ADDED
@@ -0,0 +1,2 @@
+class UnreachableBranchError(RuntimeError):
+    pass
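The new module contains only the class definition, so no call sites appear in this diff. A hypothetical sketch of the exhaustiveness-guard pattern such an exception typically supports; the `describe` function is invented for illustration:

```python
from fractal_server.app.schemas.v2 import TaskType
from fractal_server.exceptions import UnreachableBranchError


def describe(task_type: TaskType) -> str:
    if task_type == TaskType.PARALLEL:
        return "parallel"
    elif task_type in [
        TaskType.NON_PARALLEL,
        TaskType.CONVERTER_NON_PARALLEL,
        TaskType.COMPOUND,
        TaskType.CONVERTER_COMPOUND,
    ]:
        return "submit-style"
    else:
        # All TaskType members are covered above, so reaching this branch
        # indicates a programming error (e.g. a member added later without
        # updating this function), not a user error.
        raise UnreachableBranchError(f"Unexpected {task_type=}")
```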
fractal_server/utils.py CHANGED
@@ -13,12 +13,10 @@
 This module provides general purpose utilities that are not specific to any
 subsystem.
 """
-import asyncio
 import shlex
 import subprocess  # nosec
 from datetime import datetime
 from datetime import timezone
-from pathlib import Path

 from .logger import get_logger
 from .string_tools import validate_cmd
@@ -31,53 +29,6 @@ def get_timestamp() -> datetime:
     return datetime.now(tz=timezone.utc)


-async def execute_command_async(
-    *,
-    command: str,
-    cwd: Path | None = None,
-    logger_name: str | None = None,
-) -> str:
-    """
-    Execute arbitrary command
-
-    If the command returns a return code different from zero, a RuntimeError
-    containing the stderr is raised.
-
-    Args:
-        cwd:
-            The working directory for the command execution.
-        command:
-            The command to execute.
-
-    Returns:
-        stdout:
-            The stdout from the command execution.
-
-    Raises:
-        RuntimeError: if the process exited with non-zero status. The error
-            string is set to the `stderr` of the process.
-    """
-    command_split = shlex.split(command)
-    cmd, *args = command_split
-
-    logger = get_logger(logger_name)
-    cwd_kwarg = dict() if cwd is None else dict(cwd=cwd)
-    proc = await asyncio.create_subprocess_exec(
-        cmd,
-        *args,
-        stdout=asyncio.subprocess.PIPE,
-        stderr=asyncio.subprocess.PIPE,
-        **cwd_kwarg,
-    )
-    stdout, stderr = await proc.communicate()
-    logger.debug(f"Subprocess call to: {command}")
-    logger.debug(stdout.decode("utf-8"))
-    logger.debug(stderr.decode("utf-8"))
-    if proc.returncode != 0:
-        raise RuntimeError(stderr.decode("utf-8"))
-    return stdout.decode("utf-8")
-
-
 def execute_command_sync(
     *,
     command: str,
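With the async helper removed, the module keeps only the synchronous variant whose opening lines are visible in the context above. A minimal usage sketch: this diff confirms only the keyword-only `command` parameter, and the assumption that it returns the command's stdout is carried over from the removed async helper's contract:

```python
from fractal_server.utils import execute_command_sync

# Assumed to mirror the removed async helper: run the command and return
# its decoded stdout (raising on non-zero exit status).
stdout = execute_command_sync(command="echo hello")
print(stdout)
```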
{fractal_server-2.14.12.dist-info → fractal_server-2.14.14.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.12
+Version: 2.14.14
 Summary: Backend component of the Fractal analytics platform
 License: BSD-3-Clause
 Author: Tommaso Comparin