fractal-server 2.0.0a0__tar.gz → 2.0.0a2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/PKG-INFO +1 -1
- fractal_server-2.0.0a2/fractal_server/__init__.py +1 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/__init__.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/__init__.py +1 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/admin/v2.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/job.py +3 -3
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/__init__.py +2 -2
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/dataset.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/images.py +4 -9
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/project.py +0 -3
- fractal_server-2.0.0a0/fractal_server/app/routes/api/v2/apply.py → fractal_server-2.0.0a2/fractal_server/app/routes/api/v2/submit.py +1 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/workflow.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/executor.py +23 -10
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/task_files.py +0 -2
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/__init__.py +2 -2
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_local/__init__.py +1 -1
- {fractal_server-2.0.0a0/fractal_server/app/runner/executors/local → fractal_server-2.0.0a2/fractal_server/app/runner/v1/_local}/executor.py +2 -2
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/__init__.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_local/__init__.py +1 -3
- fractal_server-2.0.0a2/fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server-2.0.0a2/fractal_server/app/runner/v2/_slurm/__init__.py +139 -0
- fractal_server-2.0.0a2/fractal_server/app/runner/v2/deduplicate_list.py +22 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/merge_outputs.py +1 -4
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/runner.py +19 -16
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/runner_functions.py +14 -12
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/runner_functions_low_level.py +1 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/dataset.py +2 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/job.py +2 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/manifest.py +51 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/project.py +2 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/task.py +2 -3
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/workflow.py +2 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/workflowtask.py +2 -1
- fractal_server-2.0.0a2/fractal_server/images/__init__.py +2 -0
- fractal_server-2.0.0a2/fractal_server/images/tools.py +85 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/env.py +0 -2
- fractal_server-2.0.0a0/fractal_server/migrations/versions/56af171b0159_v2.py → fractal_server-2.0.0a2/fractal_server/migrations/versions/d71e732236cd_v2.py +29 -7
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v2/background_operations.py +0 -1
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/pyproject.toml +2 -3
- fractal_server-2.0.0a0/fractal_server/__init__.py +0 -1
- fractal_server-2.0.0a0/fractal_server/app/runner/executors/local/__init__.py +0 -3
- fractal_server-2.0.0a0/fractal_server/app/runner/v2/_slurm/__init__.py +0 -157
- fractal_server-2.0.0a0/fractal_server/app/runner/v2/deduplicate_list.py +0 -24
- fractal_server-2.0.0a0/fractal_server/images/tools.py +0 -86
- fractal_server-2.0.0a0/fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +0 -39
- fractal_server-2.0.0a0/fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +0 -68
- fractal_server-2.0.0a0/fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +0 -37
- fractal_server-2.0.0a0/fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +0 -40
- fractal_server-2.0.0a0/fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +0 -42
- fractal_server-2.0.0a0/fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +0 -37
- fractal_server-2.0.0a0/fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +0 -50
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/LICENSE +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/README.md +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/__main__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/alembic.ini +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/db/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/linkuserproject.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/security.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/state.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/dataset.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/project.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/task.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v1/workflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/dataset.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/project.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/task.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/workflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/models/v2/workflowtask.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/admin/v1.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/_aux_functions.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/dataset.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/project.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/task.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/task_collection.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/workflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v1/workflowtask.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/_aux_functions.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/task.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/task_collection.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/workflowtask.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/auth.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/aux/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/aux/_job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/aux/_runner.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/.gitignore +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/async_wrap.py +0 -0
- {fractal_server-2.0.0a0/fractal_server/app/runner/v2 → fractal_server-2.0.0a2/fractal_server/app/runner}/components.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/exceptions.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/_batching.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/_check_jobs_status.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/_executor_wait_thread.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/_slurm_config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/executors/slurm/remote.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/filenames.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/set_start_and_last_task_index.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_common.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_local/_local_config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_local/_submit_setup.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_slurm/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/common.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/handle_failed_job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_local/_local_config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_local/_submit_setup.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_slurm/_submit_setup.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_slurm/get_slurm_config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/handle_failed_job.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/task_interface.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/v1_compat.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/_validators.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/json_schemas/manifest.json +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/state.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/user.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/applyworkflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/dataset.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/dumps.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/manifest.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/project.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/task.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/task_collection.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v1/workflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/dumps.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/schemas/v2/task_collection.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/security/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/config.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/data_migrations/README.md +0 -0
- /fractal_server-2.0.0a0/fractal_server/images/__init__.py → /fractal_server-2.0.0a2/fractal_server/images/models.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/logger.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/main.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/README +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/script.py.mako +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/50a13d6138fd_initial_schema.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/97f444d47249_add_applyworkflow_project_dump.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/e75cac726012_make_applyworkflow_start_timestamp_not_.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/py.typed +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/syringe.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/__init__.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/endpoint_operations.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/utils.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v1/_TaskCollectPip.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v1/background_operations.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v1/get_collection_data.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v2/_TaskCollectPip.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/tasks/v2/get_collection_data.py +0 -0
- {fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/utils.py +0 -0
@@ -0,0 +1 @@
+__VERSION__ = "2.0.0a2"
@@ -4,6 +4,7 @@
 from .dataset import Dataset  # noqa: F401
 from .dataset import Resource  # noqa: F401
 from .job import ApplyWorkflow  # noqa: F403, F401
+from .job import JobStatusTypeV1  # noqa: F401, F403
 from .project import Project  # noqa: F403, F401
 from .task import Task  # noqa: F403, F401
 from .workflow import Workflow  # noqa: F401, F403
@@ -10,9 +10,9 @@ from sqlmodel import select

 from ....db import AsyncSession
 from ....db import get_async_db
-from ....models import ApplyWorkflow
-from ....models import JobStatusTypeV1
-from ....models import Project
+from ....models.v1 import ApplyWorkflow
+from ....models.v1 import JobStatusTypeV1
+from ....models.v1 import Project
 from ....runner.filenames import WORKFLOW_LOG_FILENAME
 from ....schemas.v1 import ApplyWorkflowReadV1
 from ....security import current_active_user
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/__init__.py
RENAMED
@@ -3,11 +3,11 @@
 """
 from fastapi import APIRouter

-from .apply import router as runner_router_v2
 from .dataset import router as dataset_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
 from .project import router as project_router_v2
+from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
 from .workflow import router as workflow_router_v2
@@ -19,7 +19,7 @@ router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
-router_api_v2.include_router(
+router_api_v2.include_router(submit_job_router_v2, tags=["V2 Submit Job"])
 router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
 router_api_v2.include_router(
     task_collection_router_v2, prefix="/task", tags=["V2 Task Collection"]
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/images.py
RENAMED
@@ -17,7 +17,7 @@ from fractal_server.app.security import current_active_user
 from fractal_server.app.security import User
 from fractal_server.images import Filters
 from fractal_server.images import SingleImage
-from fractal_server.images.tools import
+from fractal_server.images.tools import match_filter

 router = APIRouter()

@@ -68,7 +68,6 @@ async def post_new_image(
     dataset.images.append(new_image.dict())
     flag_modified(dataset, "images")

-    await db.merge(dataset)
     await db.commit()

     return Response(status_code=status.HTTP_201_CREATED)
@@ -106,9 +105,7 @@ async def query_dataset_images(
     images = [
         image
         for image in images
-        if
-            SingleImage(**image), Filters(**dataset.filters)
-        )
+        if match_filter(image, Filters(**dataset.filters))
     ]

     attributes = {}
@@ -138,8 +135,8 @@ async def query_dataset_images(
         images = [
             image
             for image in images
-            if
-
+            if match_filter(
+                image,
                 Filters(**query.filters.dict()),
             )
         ]
@@ -159,7 +156,6 @@ async def query_dataset_images(

     if total_count == 0:
         page = 1
-        page_size = 0
     else:
         last_page = (total_count // page_size) + (total_count % page_size > 0)
         if page > last_page:
@@ -206,7 +202,6 @@ async def delete_dataset_images(
     dataset.images.remove(image_to_remove)
     flag_modified(dataset, "images")

-    await db.merge(dataset)
     await db.commit()

     return Response(status_code=status.HTTP_204_NO_CONTENT)
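The query endpoints above now call `match_filter(image, Filters(...))` directly on raw image dictionaries instead of `SingleImage` instances. Below is a minimal, self-contained sketch of a filter check with that call shape; the `Filters` stand-in and the matching rules are assumptions, not the packaged `fractal_server.images.tools.match_filter`.

```python
# Minimal sketch only: a filter check with the call shape used above.
# The Filters stand-in and the matching rules are assumptions; the real
# implementation lives in fractal_server/images/tools.py.
from typing import Any, Optional


class Filters:
    def __init__(
        self,
        attributes: Optional[dict[str, Any]] = None,
        types: Optional[dict[str, bool]] = None,
    ):
        self.attributes = attributes or {}
        self.types = types or {}


def match_filter(image: dict[str, Any], filters: Filters) -> bool:
    # Every type filter must match (a missing image type counts as False)
    for key, value in filters.types.items():
        if image.get("types", {}).get(key, False) != value:
            return False
    # Every attribute filter must be present with an equal value
    for key, value in filters.attributes.items():
        if image.get("attributes", {}).get(key) != value:
            return False
    return True


images = [
    {"path": "/zarr/A/01/0", "attributes": {"well": "A01"}, "types": {"is_3D": True}},
    {"path": "/zarr/A/02/0", "attributes": {"well": "A02"}, "types": {"is_3D": False}},
]
print([i["path"] for i in images if match_filter(i, Filters(types={"is_3D": True}))])
# ['/zarr/A/01/0']
```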
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/routes/api/v2/project.py
RENAMED
@@ -167,7 +167,6 @@ async def delete_project(
         jobs = res.scalars().all()
         for job in jobs:
             job.workflow_id = None
-            await db.merge(job)
         # Delete workflow
         await db.delete(wf)
         await db.commit()
@@ -184,7 +183,6 @@ async def delete_project(
         jobs = res.scalars().all()
         for job in jobs:
             job.dataset_id = None
-            await db.merge(job)
         # Delete dataset
         await db.delete(ds)
         await db.commit()
@@ -195,7 +193,6 @@ async def delete_project(
     jobs = res.scalars().all()
     for job in jobs:
         job.project_id = None
-        await db.merge(job)

     await db.commit()

@@ -46,6 +46,7 @@ from ._subprocess_run_as_user import _glob_as_user_strict
 from ._subprocess_run_as_user import _path_exists_as_user
 from ._subprocess_run_as_user import _run_command_as_user
 from fractal_server import __VERSION__
+from fractal_server.app.runner.components import _COMPONENT_KEY_


 logger = set_logger(__name__)
@@ -544,7 +545,7 @@ class FractalSlurmExecutor(SlurmExecutor):
         single_task_submission: bool = False,
         args: Optional[Sequence[Any]] = None,
         kwargs: Optional[dict] = None,
-        components: list[Any] = None,
+        components: Optional[list[Any]] = None,
     ) -> Future:
         """
         Submit a multi-task job to the pool, where each task is handled via the
@@ -580,6 +581,10 @@ class FractalSlurmExecutor(SlurmExecutor):

         # Define slurm-job-related files
         if single_task_submission:
+            if components is not None:
+                raise ValueError(
+                    f"{single_task_submission=} but components is not None"
+                )
             job = SlurmJob(
                 slurm_file_prefix=slurm_file_prefix,
                 num_tasks_tot=1,
@@ -603,15 +608,23 @@ class FractalSlurmExecutor(SlurmExecutor):
                 num_tasks_tot=num_tasks_tot,
                 slurm_config=slurm_config,
             )
-
-
-
-
-
-
-
-
-
+
+            _prefixes = []
+            for component in components:
+                if isinstance(component, dict):
+                    # This is needed for V2
+                    actual_component = component.get(_COMPONENT_KEY_, None)
+                else:
+                    actual_component = component
+                _prefixes.append(
+                    get_task_file_paths(
+                        workflow_dir=task_files.workflow_dir,
+                        workflow_dir_user=task_files.workflow_dir_user,
+                        task_order=task_files.task_order,
+                        component=actual_component,
+                    ).file_prefix
+                )
+            job.wftask_file_prefixes = tuple(_prefixes)

         # Define I/O pickle file names/paths
         job.input_pickle_files = tuple(
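The new `_prefixes` block above unwraps dict-style components through `_COMPONENT_KEY_` (used by the V2 runner) while passing plain components through unchanged. A standalone sketch of that unwrapping step follows; the key value is a placeholder, since the actual constant in `fractal_server.app.runner.components` is not shown in this diff.

```python
# Sketch of the component-unwrapping step added above. The key value below is
# a placeholder assumption; only the dict-vs-plain handling mirrors the diff.
from typing import Any, Optional

_COMPONENT_KEY_ = "__component__"  # placeholder, not the real constant


def resolve_component(component: Any) -> Optional[Any]:
    """Return the value used to build per-task file prefixes."""
    if isinstance(component, dict):
        # V2-style component: the relevant value sits under _COMPONENT_KEY_
        # (a missing key resolves to None, as in the diff above)
        return component.get(_COMPONENT_KEY_, None)
    # V1-style component: already the plain value
    return component


assert resolve_component("B/03/0") == "B/03/0"
assert resolve_component({_COMPONENT_KEY_: "B/03/0"}) == "B/03/0"
assert resolve_component({"unrelated": 1}) is None
```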
@@ -34,8 +34,8 @@ from ...schemas.v1 import JobStatusTypeV1
 from ..exceptions import JobExecutionError
 from ..exceptions import TaskExecutionError
 from ..filenames import WORKFLOW_LOG_FILENAME
-from
-from
+from ._local import process_workflow as local_process_workflow
+from ._slurm import process_workflow as slurm_process_workflow
 from .common import close_job_logger
 from .common import validate_workflow_compatibility  # noqa: F401
 from .handle_failed_job import assemble_history_failed_job
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v1/_local/__init__.py
RENAMED
@@ -25,11 +25,11 @@ from typing import Optional

 from ....models import Workflow  # FIXME: this is v1 specific
 from ...async_wrap import async_wrap
-from ...executors.local.executor import FractalThreadPoolExecutor
 from ...set_start_and_last_task_index import set_start_and_last_task_index
 from .._common import execute_tasks  # FIXME: this is v1 specific
 from ..common import TaskParameters  # FIXME: this is v1 specific
 from ._submit_setup import _local_submit_setup
+from .executor import FractalThreadPoolExecutor


 def _process_workflow(
@@ -18,8 +18,8 @@ from typing import Iterable
 from typing import Optional
 from typing import Sequence

-from
-from
+from ._local_config import get_default_local_backend_config
+from ._local_config import LocalBackendConfig


 class FractalThreadPoolExecutor(ThreadPoolExecutor):
@@ -31,7 +31,6 @@ from ._slurm import process_workflow as slurm_process_workflow
 from .handle_failed_job import assemble_filters_failed_job
 from .handle_failed_job import assemble_history_failed_job
 from .handle_failed_job import assemble_images_failed_job
-from .runner import execute_tasks_v2  # noqa
 from fractal_server import __VERSION__

 _backends = {}
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/_local/__init__.py
RENAMED
@@ -25,12 +25,10 @@ from typing import Optional
 from ....models.v2 import DatasetV2
 from ....models.v2 import WorkflowV2
 from ...async_wrap import async_wrap
-from ...executors.local.executor import FractalThreadPoolExecutor
 from ...set_start_and_last_task_index import set_start_and_last_task_index
 from ..runner import execute_tasks_v2
 from ._submit_setup import _local_submit_setup
-
-# from typing import Any
+from .executor import FractalThreadPoolExecutor


 def _process_workflow(
@@ -0,0 +1,100 @@
+# Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
+# University of Zurich
+#
+# Original authors:
+# Tommaso Comparin <tommaso.comparin@exact-lab.it>
+#
+# This file is part of Fractal and was originally developed by eXact lab S.r.l.
+# <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
+# Institute for Biomedical Research and Pelkmans Lab from the University of
+# Zurich.
+"""
+Custom version of Python
+[ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor)).
+"""
+from concurrent.futures import ThreadPoolExecutor
+from typing import Callable
+from typing import Iterable
+from typing import Optional
+from typing import Sequence
+
+from ._local_config import get_default_local_backend_config
+from ._local_config import LocalBackendConfig
+
+
+class FractalThreadPoolExecutor(ThreadPoolExecutor):
+    """
+    Custom version of
+    [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor))
+    that overrides the `submit` and `map` methods
+    """
+
+    def submit(
+        self,
+        *args,
+        local_backend_config: Optional[LocalBackendConfig] = None,
+        **kwargs,
+    ):
+        """
+        Compared to the `ThreadPoolExecutor` method, here we accept an addition
+        keyword argument (`local_backend_config`), which is then simply
+        ignored.
+        """
+        return super().submit(*args, **kwargs)
+
+    def map(
+        self,
+        fn: Callable,
+        *iterables: Sequence[Iterable],
+        local_backend_config: Optional[LocalBackendConfig] = None,
+    ):
+        """
+        Custom version of the `Executor.map` method
+
+        The main change with the respect to the original `map` method is that
+        the list of tasks to be executed is split into chunks, and then
+        `super().map` is called (sequentially) on each chunk. The goal of this
+        change is to limit parallelism, e.g. due to limited computational
+        resources.
+
+        Other changes from the `concurrent.futures` `map` method:
+
+        1. Removed `timeout` argument;
+        2. Removed `chunksize`;
+        3. All iterators (both inputs and output ones) are transformed into
+        lists.
+
+        Args:
+            fn: A callable function.
+            iterables: The argument iterables (one iterable per argument of
+                `fn`).
+            local_backend_config: The backend configuration, needed to extract
+                `parallel_tasks_per_job`.
+        """
+
+        # Preliminary check
+        iterable_lengths = [len(it) for it in iterables]
+        if not len(set(iterable_lengths)) == 1:
+            raise ValueError("Iterables have different lengths.")
+
+        # Set total number of arguments
+        n_elements = len(iterables[0])
+
+        # Set parallel_tasks_per_job
+        if local_backend_config is None:
+            local_backend_config = get_default_local_backend_config()
+        parallel_tasks_per_job = local_backend_config.parallel_tasks_per_job
+        if parallel_tasks_per_job is None:
+            parallel_tasks_per_job = n_elements
+
+        # Execute tasks, in chunks of size parallel_tasks_per_job
+        results = []
+        for ind_chunk in range(0, n_elements, parallel_tasks_per_job):
+            chunk_iterables = [
+                it[ind_chunk : ind_chunk + parallel_tasks_per_job]  # noqa
+                for it in iterables
+            ]
+            map_iter = super().map(fn, *chunk_iterables)
+            results.extend(list(map_iter))
+
+        return iter(results)
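A short usage sketch of the chunked `map` in the new executor above: with `parallel_tasks_per_job=2`, six inputs run as three sequential chunks of two. It assumes fractal-server 2.0.0a2 is installed and that `LocalBackendConfig` can be constructed with a `parallel_tasks_per_job` field, as the attribute access in the file suggests.

```python
# Usage sketch (assumes fractal-server 2.0.0a2 is installed and that
# LocalBackendConfig accepts parallel_tasks_per_job as a constructor field).
from fractal_server.app.runner.v2._local._local_config import LocalBackendConfig
from fractal_server.app.runner.v2._local.executor import FractalThreadPoolExecutor


def square(x: int) -> int:
    return x * x


with FractalThreadPoolExecutor() as executor:
    config = LocalBackendConfig(parallel_tasks_per_job=2)
    # Six inputs are processed in three sequential chunks of two
    results = executor.map(square, [1, 2, 3, 4, 5, 6], local_backend_config=config)
    print(list(results))  # [1, 4, 9, 16, 25, 36]
```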
@@ -0,0 +1,139 @@
+# Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
+# University of Zurich
+#
+# Original authors:
+# Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
+# Tommaso Comparin <tommaso.comparin@exact-lab.it>
+# Marco Franzon <marco.franzon@exact-lab.it>
+#
+# This file is part of Fractal and was originally developed by eXact lab S.r.l.
+# <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
+# Institute for Biomedical Research and Pelkmans Lab from the University of
+# Zurich.
+"""
+Slurm Bakend
+
+This backend runs fractal workflows in a SLURM cluster using Clusterfutures
+Executor objects.
+"""
+from pathlib import Path
+from typing import Any
+from typing import Optional
+from typing import Union
+
+from ....models.v2 import DatasetV2
+from ....models.v2 import WorkflowV2
+from ...async_wrap import async_wrap
+from ...executors.slurm.executor import FractalSlurmExecutor
+from ...set_start_and_last_task_index import set_start_and_last_task_index
+from ..runner import execute_tasks_v2
+from ._submit_setup import _slurm_submit_setup
+
+# from .._common import execute_tasks
+# from ..common import async_wrap
+# from ..common import set_start_and_last_task_index
+# from ..common import TaskParameters
+
+
+def _process_workflow(
+    *,
+    workflow: WorkflowV2,
+    dataset: DatasetV2,
+    logger_name: str,
+    workflow_dir: Path,
+    workflow_dir_user: Path,
+    first_task_index: int,
+    last_task_index: int,
+    slurm_user: Optional[str] = None,
+    slurm_account: Optional[str] = None,
+    user_cache_dir: str,
+    worker_init: Optional[Union[str, list[str]]] = None,
+) -> dict[str, Any]:
+    """
+    Internal processing routine for the SLURM backend
+
+    This function initialises the a FractalSlurmExecutor, setting logging,
+    workflow working dir and user to impersonate. It then schedules the
+    workflow tasks and returns the new dataset attributes
+
+    Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+
+    Returns:
+        new_dataset_attributes:
+    """
+
+    if not slurm_user:
+        raise RuntimeError(
+            "slurm_user argument is required, for slurm backend"
+        )
+
+    if isinstance(worker_init, str):
+        worker_init = worker_init.split("\n")
+
+    with FractalSlurmExecutor(
+        debug=True,
+        keep_logs=True,
+        slurm_user=slurm_user,
+        user_cache_dir=user_cache_dir,
+        working_dir=workflow_dir,
+        working_dir_user=workflow_dir_user,
+        common_script_lines=worker_init,
+        slurm_account=slurm_account,
+    ) as executor:
+        new_dataset_attributes = execute_tasks_v2(
+            wf_task_list=workflow.task_list[
+                first_task_index : (last_task_index + 1)  # noqa
+            ],  # noqa
+            dataset=dataset,
+            executor=executor,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+            logger_name=logger_name,
+            submit_setup_call=_slurm_submit_setup,
+        )
+    return new_dataset_attributes
+
+
+async def process_workflow(
+    *,
+    workflow: WorkflowV2,
+    dataset: DatasetV2,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    first_task_index: Optional[int] = None,
+    last_task_index: Optional[int] = None,
+    logger_name: str,
+    # Slurm-specific
+    user_cache_dir: Optional[str] = None,
+    slurm_user: Optional[str] = None,
+    slurm_account: Optional[str] = None,
+    worker_init: Optional[str] = None,
+) -> dict:
+    """
+    Process workflow (SLURM backend public interface)
+
+    Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+    """
+
+    # Set values of first_task_index and last_task_index
+    num_tasks = len(workflow.task_list)
+    first_task_index, last_task_index = set_start_and_last_task_index(
+        num_tasks,
+        first_task_index=first_task_index,
+        last_task_index=last_task_index,
+    )
+
+    new_dataset_attributes = await async_wrap(_process_workflow)(
+        workflow=workflow,
+        dataset=dataset,
+        logger_name=logger_name,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        first_task_index=first_task_index,
+        last_task_index=last_task_index,
+        user_cache_dir=user_cache_dir,
+        slurm_user=slurm_user,
+        slurm_account=slurm_account,
+        worker_init=worker_init,
+    )
+    return new_dataset_attributes
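`process_workflow` above hands the blocking `_process_workflow` to `async_wrap` so the SLURM submission loop does not block the event loop. The sketch below shows the generic run-in-executor pattern such a wrapper typically follows; it is illustrative only and not the packaged `fractal_server.app.runner.async_wrap` implementation.

```python
# Generic sketch of the async-wrapping pattern (illustrative; not the
# packaged fractal_server.app.runner.async_wrap helper).
import asyncio
import functools
from typing import Callable


def async_wrap(func: Callable) -> Callable:
    """Turn a blocking callable into a coroutine run in a thread executor."""

    @functools.wraps(func)
    async def run(*args, **kwargs):
        loop = asyncio.get_running_loop()
        call = functools.partial(func, *args, **kwargs)
        return await loop.run_in_executor(None, call)

    return run


def blocking_process_workflow(n_tasks: int) -> dict:
    # Stand-in for the synchronous _process_workflow above
    return {"executed_tasks": n_tasks}


async def main():
    result = await async_wrap(blocking_process_workflow)(n_tasks=3)
    print(result)  # {'executed_tasks': 3}


asyncio.run(main())
```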
@@ -0,0 +1,22 @@
+from typing import TypeVar
+
+from ....images import SingleImage
+from .task_interface import InitArgsModel
+
+T = TypeVar("T", SingleImage, InitArgsModel)
+
+
+def deduplicate_list(
+    this_list: list[T],
+) -> list[T]:
+    """
+    Custom replacement for `set(this_list)`, when items are non-hashable.
+    """
+    new_list_dict = []
+    new_list_objs = []
+    for this_obj in this_list:
+        this_dict = this_obj.dict()
+        if this_dict not in new_list_dict:
+            new_list_dict.append(this_dict)
+            new_list_objs.append(this_obj)
+    return new_list_objs
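A quick usage sketch of the new `deduplicate_list` helper above: duplicates are detected via each item's pydantic `.dict()` representation, so the first occurrence wins. It assumes fractal-server 2.0.0a2 is installed and that `SingleImage` can be built from `path` alone.

```python
# Usage sketch (assumes fractal-server 2.0.0a2 is installed and that
# SingleImage accepts path as its only required field).
from fractal_server.app.runner.v2.deduplicate_list import deduplicate_list
from fractal_server.images import SingleImage

images = [
    SingleImage(path="/zarr/A/01/0"),
    SingleImage(path="/zarr/A/02/0"),
    SingleImage(path="/zarr/A/01/0"),  # duplicate of the first entry
]
print([img.path for img in deduplicate_list(images)])
# ['/zarr/A/01/0', '/zarr/A/02/0']
```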
{fractal_server-2.0.0a0 → fractal_server-2.0.0a2}/fractal_server/app/runner/v2/merge_outputs.py
RENAMED
@@ -2,7 +2,6 @@ from copy import copy

 from fractal_server.app.runner.v2.deduplicate_list import deduplicate_list
 from fractal_server.app.runner.v2.task_interface import TaskOutput
-from fractal_server.images import SingleImage


 def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
@@ -24,9 +23,7 @@ def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
            raise ValueError(f"{current_new_filters=} but {last_new_filters=}")
        last_new_filters = copy(current_new_filters)

-    final_image_list_updates = deduplicate_list(
-        final_image_list_updates, PydanticModel=SingleImage
-    )
+    final_image_list_updates = deduplicate_list(final_image_list_updates)

    additional_args = {}
    if last_new_filters is not None:
@@ -8,7 +8,7 @@ from typing import Optional

 from ....images import Filters
 from ....images import SingleImage
-from ....images.tools import
+from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_path
 from ....images.tools import match_filter
 from ..filenames import FILTERS_FILENAME
@@ -52,16 +52,15 @@ def execute_tasks_v2(
        # PRE TASK EXECUTION

        # Get filtered images
-
-
-
-
-
+        pre_filters = dict(
+            types=copy(tmp_filters["types"]),
+            attributes=copy(tmp_filters["attributes"]),
+        )
+        pre_filters["types"].update(wftask.input_filters["types"])
+        pre_filters["attributes"].update(wftask.input_filters["attributes"])
+        filtered_images = filter_image_list(
            images=tmp_images,
-            filters=Filters(
-                types=pre_type_filters,
-                attributes=pre_attribute_filters,
-            ),
+            filters=Filters(**pre_filters),
        )
        # Verify that filtered images comply with task input_types
        for image in filtered_images:
@@ -183,22 +182,26 @@ def execute_tasks_v2(
            updated_attributes.update(image["attributes"])
            updated_types.update(image["types"])
            updated_types.update(task.output_types)
-            new_image =
+            new_image = dict(
                path=image["path"],
                origin=image["origin"],
                attributes=updated_attributes,
                types=updated_types,
            )
+            # Validate new image
+            SingleImage(**new_image)
            # Add image into the dataset image list
-            tmp_images.append(new_image
+            tmp_images.append(new_image)

        # Remove images from tmp_images
-        for
+        for image_path in current_task_output.image_list_removals:
            image_search = find_image_by_path(
-                images=tmp_images, path=
+                images=tmp_images, path=image_path
            )
-            if image_search
-                raise
+            if image_search is None:
+                raise ValueError(
+                    f"Cannot remove missing image with path {image_path=}"
+                )
            else:
                tmp_images.pop(image_search["index"])

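The removal loop above expects `find_image_by_path` to return `None` when no image matches and, otherwise, a record exposing an `"index"` key used for `tmp_images.pop(...)`. A minimal helper with that contract is sketched below; it is illustrative only, and the packaged version lives in `fractal_server/images/tools.py`.

```python
# Illustrative helper mirroring how find_image_by_path is used above
# (returns None when no match, otherwise the image and its list index).
from typing import Any, Optional


def find_image_by_path(
    *, images: list[dict[str, Any]], path: str
) -> Optional[dict[str, Any]]:
    for index, image in enumerate(images):
        if image["path"] == path:
            return dict(image=image, index=index)
    return None


tmp_images = [{"path": "/zarr/A/01/0"}, {"path": "/zarr/A/02/0"}]
search = find_image_by_path(images=tmp_images, path="/zarr/A/02/0")
if search is None:
    raise ValueError("Cannot remove missing image")
tmp_images.pop(search["index"])
print(tmp_images)  # [{'path': '/zarr/A/01/0'}]
```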