parsl 2024.3.18__py3-none-any.whl → 2025.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- parsl/__init__.py +9 -10
- parsl/addresses.py +26 -6
- parsl/app/app.py +7 -8
- parsl/app/bash.py +15 -8
- parsl/app/errors.py +10 -13
- parsl/app/futures.py +8 -10
- parsl/app/python.py +2 -1
- parsl/benchmark/perf.py +2 -1
- parsl/concurrent/__init__.py +2 -2
- parsl/config.py +53 -10
- parsl/configs/ASPIRE1.py +6 -5
- parsl/configs/Azure.py +9 -8
- parsl/configs/bridges.py +6 -4
- parsl/configs/cc_in2p3.py +3 -3
- parsl/configs/ec2.py +3 -1
- parsl/configs/expanse.py +4 -3
- parsl/configs/frontera.py +3 -4
- parsl/configs/htex_local.py +3 -4
- parsl/configs/illinoiscluster.py +3 -1
- parsl/configs/improv.py +34 -0
- parsl/configs/kubernetes.py +4 -3
- parsl/configs/local_threads.py +5 -1
- parsl/configs/midway.py +5 -3
- parsl/configs/osg.py +4 -2
- parsl/configs/polaris.py +4 -2
- parsl/configs/stampede2.py +6 -5
- parsl/configs/summit.py +3 -3
- parsl/configs/toss3_llnl.py +4 -3
- parsl/configs/vineex_local.py +6 -4
- parsl/configs/wqex_local.py +5 -3
- parsl/curvezmq.py +4 -0
- parsl/data_provider/data_manager.py +4 -3
- parsl/data_provider/file_noop.py +1 -2
- parsl/data_provider/files.py +3 -3
- parsl/data_provider/ftp.py +1 -3
- parsl/data_provider/globus.py +7 -6
- parsl/data_provider/http.py +2 -2
- parsl/data_provider/rsync.py +1 -1
- parsl/data_provider/staging.py +2 -2
- parsl/data_provider/zip.py +135 -0
- parsl/dataflow/dependency_resolvers.py +115 -0
- parsl/dataflow/dflow.py +259 -223
- parsl/dataflow/errors.py +3 -5
- parsl/dataflow/futures.py +27 -14
- parsl/dataflow/memoization.py +5 -5
- parsl/dataflow/rundirs.py +5 -6
- parsl/dataflow/taskrecord.py +4 -5
- parsl/executors/__init__.py +4 -2
- parsl/executors/base.py +45 -15
- parsl/executors/errors.py +13 -0
- parsl/executors/execute_task.py +37 -0
- parsl/executors/flux/execute_parsl_task.py +3 -3
- parsl/executors/flux/executor.py +18 -19
- parsl/executors/flux/flux_instance_manager.py +26 -27
- parsl/executors/high_throughput/errors.py +43 -3
- parsl/executors/high_throughput/executor.py +307 -285
- parsl/executors/high_throughput/interchange.py +137 -168
- parsl/executors/high_throughput/manager_record.py +4 -0
- parsl/executors/high_throughput/manager_selector.py +55 -0
- parsl/executors/high_throughput/monitoring_info.py +2 -1
- parsl/executors/high_throughput/mpi_executor.py +113 -0
- parsl/executors/high_throughput/mpi_prefix_composer.py +10 -11
- parsl/executors/high_throughput/mpi_resource_management.py +6 -17
- parsl/executors/high_throughput/probe.py +9 -7
- parsl/executors/high_throughput/process_worker_pool.py +77 -75
- parsl/executors/high_throughput/zmq_pipes.py +81 -23
- parsl/executors/radical/executor.py +130 -79
- parsl/executors/radical/rpex_resources.py +17 -15
- parsl/executors/radical/rpex_worker.py +4 -3
- parsl/executors/status_handling.py +157 -51
- parsl/executors/taskvine/__init__.py +1 -1
- parsl/executors/taskvine/errors.py +1 -1
- parsl/executors/taskvine/exec_parsl_function.py +2 -2
- parsl/executors/taskvine/executor.py +38 -55
- parsl/executors/taskvine/factory.py +1 -1
- parsl/executors/taskvine/factory_config.py +1 -1
- parsl/executors/taskvine/manager.py +17 -13
- parsl/executors/taskvine/manager_config.py +7 -2
- parsl/executors/threads.py +6 -6
- parsl/executors/workqueue/errors.py +1 -1
- parsl/executors/workqueue/exec_parsl_function.py +6 -5
- parsl/executors/workqueue/executor.py +64 -63
- parsl/executors/workqueue/parsl_coprocess.py +1 -1
- parsl/jobs/error_handlers.py +2 -2
- parsl/jobs/job_status_poller.py +28 -112
- parsl/jobs/states.py +7 -2
- parsl/jobs/strategy.py +43 -31
- parsl/launchers/__init__.py +12 -3
- parsl/launchers/errors.py +1 -1
- parsl/launchers/launchers.py +0 -6
- parsl/log_utils.py +1 -2
- parsl/monitoring/db_manager.py +55 -93
- parsl/monitoring/errors.py +6 -0
- parsl/monitoring/monitoring.py +85 -311
- parsl/monitoring/queries/pandas.py +1 -2
- parsl/monitoring/radios/base.py +13 -0
- parsl/monitoring/radios/filesystem.py +52 -0
- parsl/monitoring/radios/htex.py +57 -0
- parsl/monitoring/radios/multiprocessing.py +17 -0
- parsl/monitoring/radios/udp.py +56 -0
- parsl/monitoring/radios/zmq.py +17 -0
- parsl/monitoring/remote.py +33 -37
- parsl/monitoring/router.py +212 -0
- parsl/monitoring/types.py +5 -6
- parsl/monitoring/visualization/app.py +4 -2
- parsl/monitoring/visualization/models.py +0 -1
- parsl/monitoring/visualization/plots/default/workflow_plots.py +8 -4
- parsl/monitoring/visualization/plots/default/workflow_resource_plots.py +1 -0
- parsl/monitoring/visualization/utils.py +0 -1
- parsl/monitoring/visualization/views.py +16 -9
- parsl/multiprocessing.py +0 -1
- parsl/process_loggers.py +1 -2
- parsl/providers/__init__.py +8 -17
- parsl/providers/aws/aws.py +2 -3
- parsl/providers/azure/azure.py +4 -5
- parsl/providers/base.py +2 -18
- parsl/providers/cluster_provider.py +3 -9
- parsl/providers/condor/condor.py +7 -17
- parsl/providers/errors.py +2 -2
- parsl/providers/googlecloud/googlecloud.py +2 -1
- parsl/providers/grid_engine/grid_engine.py +5 -14
- parsl/providers/kubernetes/kube.py +80 -40
- parsl/providers/local/local.py +13 -26
- parsl/providers/lsf/lsf.py +5 -23
- parsl/providers/pbspro/pbspro.py +5 -17
- parsl/providers/slurm/slurm.py +81 -39
- parsl/providers/torque/torque.py +3 -14
- parsl/serialize/__init__.py +8 -3
- parsl/serialize/base.py +1 -2
- parsl/serialize/concretes.py +5 -4
- parsl/serialize/facade.py +3 -3
- parsl/serialize/proxystore.py +3 -2
- parsl/tests/__init__.py +1 -1
- parsl/tests/configs/azure_single_node.py +4 -5
- parsl/tests/configs/bridges.py +3 -2
- parsl/tests/configs/cc_in2p3.py +1 -3
- parsl/tests/configs/comet.py +2 -1
- parsl/tests/configs/ec2_single_node.py +1 -2
- parsl/tests/configs/ec2_spot.py +1 -2
- parsl/tests/configs/flux_local.py +11 -0
- parsl/tests/configs/frontera.py +2 -3
- parsl/tests/configs/htex_local.py +3 -5
- parsl/tests/configs/htex_local_alternate.py +11 -15
- parsl/tests/configs/htex_local_intask_staging.py +5 -9
- parsl/tests/configs/htex_local_rsync_staging.py +4 -8
- parsl/tests/configs/local_radical.py +1 -3
- parsl/tests/configs/local_radical_mpi.py +2 -2
- parsl/tests/configs/local_threads_checkpoint_periodic.py +8 -10
- parsl/tests/configs/local_threads_monitoring.py +0 -1
- parsl/tests/configs/midway.py +2 -2
- parsl/tests/configs/nscc_singapore.py +3 -3
- parsl/tests/configs/osg_htex.py +1 -1
- parsl/tests/configs/petrelkube.py +3 -2
- parsl/tests/configs/slurm_local.py +24 -0
- parsl/tests/configs/summit.py +1 -0
- parsl/tests/configs/taskvine_ex.py +4 -7
- parsl/tests/configs/user_opts.py +0 -7
- parsl/tests/configs/workqueue_ex.py +4 -6
- parsl/tests/conftest.py +27 -13
- parsl/tests/integration/test_stress/test_python_simple.py +3 -4
- parsl/tests/integration/test_stress/test_python_threads.py +3 -5
- parsl/tests/manual_tests/htex_local.py +4 -6
- parsl/tests/manual_tests/test_basic.py +1 -0
- parsl/tests/manual_tests/test_log_filter.py +3 -1
- parsl/tests/manual_tests/test_memory_limits.py +6 -8
- parsl/tests/manual_tests/test_regression_220.py +2 -1
- parsl/tests/manual_tests/test_udp_simple.py +4 -4
- parsl/tests/manual_tests/test_worker_count.py +3 -2
- parsl/tests/scaling_tests/htex_local.py +2 -4
- parsl/tests/scaling_tests/test_scale.py +0 -9
- parsl/tests/scaling_tests/vineex_condor.py +1 -2
- parsl/tests/scaling_tests/vineex_local.py +1 -2
- parsl/tests/site_tests/site_config_selector.py +1 -6
- parsl/tests/site_tests/test_provider.py +4 -2
- parsl/tests/site_tests/test_site.py +2 -0
- parsl/tests/sites/test_affinity.py +7 -7
- parsl/tests/sites/test_dynamic_executor.py +3 -4
- parsl/tests/sites/test_ec2.py +3 -2
- parsl/tests/sites/test_worker_info.py +4 -5
- parsl/tests/test_aalst_patterns.py +0 -1
- parsl/tests/test_bash_apps/test_apptimeout.py +2 -2
- parsl/tests/test_bash_apps/test_basic.py +10 -4
- parsl/tests/test_bash_apps/test_error_codes.py +5 -7
- parsl/tests/test_bash_apps/test_inputs_default.py +25 -0
- parsl/tests/test_bash_apps/test_kwarg_storage.py +1 -1
- parsl/tests/test_bash_apps/test_memoize.py +2 -8
- parsl/tests/test_bash_apps/test_memoize_ignore_args.py +9 -14
- parsl/tests/test_bash_apps/test_memoize_ignore_args_regr.py +9 -14
- parsl/tests/test_bash_apps/test_multiline.py +1 -1
- parsl/tests/test_bash_apps/test_pipeline.py +1 -1
- parsl/tests/test_bash_apps/test_std_uri.py +123 -0
- parsl/tests/test_bash_apps/test_stdout.py +33 -8
- parsl/tests/test_callables.py +2 -2
- parsl/tests/test_checkpointing/test_periodic.py +21 -39
- parsl/tests/test_checkpointing/test_python_checkpoint_1.py +1 -0
- parsl/tests/test_checkpointing/test_python_checkpoint_2.py +2 -2
- parsl/tests/test_checkpointing/test_python_checkpoint_3.py +0 -1
- parsl/tests/test_checkpointing/test_regression_239.py +1 -1
- parsl/tests/test_checkpointing/test_task_exit.py +2 -3
- parsl/tests/test_docs/test_from_slides.py +5 -2
- parsl/tests/test_docs/test_kwargs.py +4 -1
- parsl/tests/test_docs/test_tutorial_1.py +1 -2
- parsl/tests/test_docs/test_workflow1.py +2 -2
- parsl/tests/test_docs/test_workflow2.py +0 -1
- parsl/tests/test_error_handling/test_rand_fail.py +2 -2
- parsl/tests/test_error_handling/test_resource_spec.py +10 -12
- parsl/tests/test_error_handling/test_retries.py +6 -16
- parsl/tests/test_error_handling/test_retry_handler.py +1 -0
- parsl/tests/test_error_handling/test_retry_handler_failure.py +2 -1
- parsl/tests/test_error_handling/test_serialization_fail.py +1 -1
- parsl/tests/test_error_handling/test_wrap_with_logs.py +1 -0
- parsl/tests/test_execute_task.py +29 -0
- parsl/tests/test_flux.py +1 -1
- parsl/tests/test_htex/test_basic.py +2 -3
- parsl/tests/test_htex/test_block_manager_selector_unit.py +20 -0
- parsl/tests/test_htex/test_command_client_timeout.py +66 -0
- parsl/tests/test_htex/test_connected_blocks.py +3 -2
- parsl/tests/test_htex/test_cpu_affinity_explicit.py +6 -10
- parsl/tests/test_htex/test_disconnected_blocks.py +6 -5
- parsl/tests/test_htex/test_disconnected_blocks_failing_provider.py +71 -0
- parsl/tests/test_htex/test_drain.py +11 -10
- parsl/tests/test_htex/test_htex.py +51 -25
- parsl/tests/test_htex/test_manager_failure.py +0 -1
- parsl/tests/test_htex/test_manager_selector_by_block.py +51 -0
- parsl/tests/test_htex/test_managers_command.py +36 -0
- parsl/tests/test_htex/test_missing_worker.py +2 -12
- parsl/tests/test_htex/test_multiple_disconnected_blocks.py +9 -9
- parsl/tests/test_htex/test_resource_spec_validation.py +45 -0
- parsl/tests/test_htex/test_zmq_binding.py +29 -8
- parsl/tests/test_monitoring/test_app_names.py +5 -5
- parsl/tests/test_monitoring/test_basic.py +73 -25
- parsl/tests/test_monitoring/test_db_locks.py +6 -4
- parsl/tests/test_monitoring/test_fuzz_zmq.py +19 -8
- parsl/tests/test_monitoring/test_htex_init_blocks_vs_monitoring.py +80 -0
- parsl/tests/test_monitoring/test_incomplete_futures.py +5 -4
- parsl/tests/test_monitoring/test_memoization_representation.py +4 -2
- parsl/tests/test_monitoring/test_stdouterr.py +134 -0
- parsl/tests/test_monitoring/test_viz_colouring.py +1 -0
- parsl/tests/test_mpi_apps/test_bad_mpi_config.py +33 -26
- parsl/tests/test_mpi_apps/test_mpi_mode_enabled.py +28 -11
- parsl/tests/test_mpi_apps/test_mpi_prefix.py +4 -4
- parsl/tests/test_mpi_apps/test_mpi_scheduler.py +7 -2
- parsl/tests/test_mpi_apps/test_mpiex.py +64 -0
- parsl/tests/test_mpi_apps/test_resource_spec.py +42 -49
- parsl/tests/test_providers/test_kubernetes_provider.py +102 -0
- parsl/tests/test_providers/test_local_provider.py +3 -132
- parsl/tests/test_providers/test_pbspro_template.py +2 -3
- parsl/tests/test_providers/test_slurm_template.py +2 -3
- parsl/tests/test_providers/test_submiterror_deprecation.py +2 -1
- parsl/tests/test_python_apps/test_context_manager.py +128 -0
- parsl/tests/test_python_apps/test_dep_standard_futures.py +2 -1
- parsl/tests/test_python_apps/test_dependencies_deep.py +59 -0
- parsl/tests/test_python_apps/test_fail.py +0 -25
- parsl/tests/test_python_apps/test_futures.py +2 -1
- parsl/tests/test_python_apps/test_inputs_default.py +22 -0
- parsl/tests/test_python_apps/test_join.py +0 -1
- parsl/tests/test_python_apps/test_lifted.py +11 -7
- parsl/tests/test_python_apps/test_memoize_bad_id_for_memo.py +1 -0
- parsl/tests/test_python_apps/test_outputs.py +1 -1
- parsl/tests/test_python_apps/test_pluggable_future_resolution.py +161 -0
- parsl/tests/test_radical/test_mpi_funcs.py +1 -2
- parsl/tests/test_regression/test_1480.py +2 -1
- parsl/tests/test_regression/test_1653.py +2 -1
- parsl/tests/test_regression/test_226.py +1 -0
- parsl/tests/test_regression/test_2652.py +1 -0
- parsl/tests/test_regression/test_69a.py +0 -1
- parsl/tests/test_regression/test_854.py +4 -2
- parsl/tests/test_regression/test_97_parallelism_0.py +1 -2
- parsl/tests/test_regression/test_98.py +0 -1
- parsl/tests/test_scaling/test_block_error_handler.py +9 -4
- parsl/tests/test_scaling/test_regression_1621.py +11 -15
- parsl/tests/test_scaling/test_regression_3568_scaledown_vs_MISSING.py +84 -0
- parsl/tests/test_scaling/test_regression_3696_oscillation.py +103 -0
- parsl/tests/test_scaling/test_scale_down.py +2 -5
- parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py +5 -8
- parsl/tests/test_scaling/test_scale_down_htex_unregistered.py +71 -0
- parsl/tests/test_scaling/test_shutdown_scalein.py +73 -0
- parsl/tests/test_scaling/test_worker_interchange_bad_messages_3262.py +90 -0
- parsl/tests/test_serialization/test_2555_caching_deserializer.py +1 -1
- parsl/tests/test_serialization/test_3495_deserialize_managerlost.py +47 -0
- parsl/tests/test_serialization/test_basic.py +2 -1
- parsl/tests/test_serialization/test_htex_code_cache.py +3 -4
- parsl/tests/test_serialization/test_pack_resource_spec.py +2 -1
- parsl/tests/test_serialization/test_proxystore_configured.py +10 -6
- parsl/tests/test_serialization/test_proxystore_impl.py +5 -3
- parsl/tests/test_shutdown/test_kill_monitoring.py +64 -0
- parsl/tests/test_staging/staging_provider.py +2 -2
- parsl/tests/test_staging/test_1316.py +3 -4
- parsl/tests/test_staging/test_docs_1.py +2 -1
- parsl/tests/test_staging/test_docs_2.py +2 -1
- parsl/tests/test_staging/test_elaborate_noop_file.py +2 -3
- parsl/tests/{test_data → test_staging}/test_file.py +6 -6
- parsl/tests/{test_data → test_staging}/test_output_chain_filenames.py +3 -0
- parsl/tests/test_staging/test_staging_ftp.py +1 -0
- parsl/tests/test_staging/test_staging_https.py +5 -2
- parsl/tests/test_staging/test_staging_stdout.py +64 -0
- parsl/tests/test_staging/test_zip_in.py +39 -0
- parsl/tests/test_staging/test_zip_out.py +110 -0
- parsl/tests/test_staging/test_zip_to_zip.py +41 -0
- parsl/tests/test_summary.py +2 -2
- parsl/tests/test_thread_parallelism.py +0 -1
- parsl/tests/test_threads/test_configs.py +1 -2
- parsl/tests/test_threads/test_lazy_errors.py +2 -2
- parsl/tests/test_utils/test_execute_wait.py +35 -0
- parsl/tests/test_utils/test_sanitize_dns.py +76 -0
- parsl/tests/unit/test_address.py +20 -0
- parsl/tests/unit/test_file.py +99 -0
- parsl/tests/unit/test_usage_tracking.py +66 -0
- parsl/usage_tracking/api.py +65 -0
- parsl/usage_tracking/levels.py +6 -0
- parsl/usage_tracking/usage.py +104 -62
- parsl/utils.py +137 -4
- parsl/version.py +1 -1
- {parsl-2024.3.18.data → parsl-2025.1.13.data}/scripts/exec_parsl_function.py +6 -5
- parsl-2025.1.13.data/scripts/interchange.py +649 -0
- {parsl-2024.3.18.data → parsl-2025.1.13.data}/scripts/process_worker_pool.py +77 -75
- parsl-2025.1.13.dist-info/METADATA +96 -0
- parsl-2025.1.13.dist-info/RECORD +462 -0
- {parsl-2024.3.18.dist-info → parsl-2025.1.13.dist-info}/WHEEL +1 -1
- parsl/channels/__init__.py +0 -7
- parsl/channels/base.py +0 -141
- parsl/channels/errors.py +0 -113
- parsl/channels/local/local.py +0 -164
- parsl/channels/oauth_ssh/oauth_ssh.py +0 -110
- parsl/channels/ssh/ssh.py +0 -276
- parsl/channels/ssh_il/__init__.py +0 -0
- parsl/channels/ssh_il/ssh_il.py +0 -74
- parsl/configs/ad_hoc.py +0 -35
- parsl/executors/radical/rpex_master.py +0 -42
- parsl/monitoring/radios.py +0 -175
- parsl/providers/ad_hoc/__init__.py +0 -0
- parsl/providers/ad_hoc/ad_hoc.py +0 -248
- parsl/providers/cobalt/__init__.py +0 -0
- parsl/providers/cobalt/cobalt.py +0 -236
- parsl/providers/cobalt/template.py +0 -17
- parsl/tests/configs/ad_hoc_cluster_htex.py +0 -35
- parsl/tests/configs/cooley_htex.py +0 -37
- parsl/tests/configs/htex_ad_hoc_cluster.py +0 -28
- parsl/tests/configs/local_adhoc.py +0 -18
- parsl/tests/configs/swan_htex.py +0 -43
- parsl/tests/configs/theta.py +0 -37
- parsl/tests/integration/test_channels/__init__.py +0 -0
- parsl/tests/integration/test_channels/test_channels.py +0 -17
- parsl/tests/integration/test_channels/test_local_channel.py +0 -42
- parsl/tests/integration/test_channels/test_scp_1.py +0 -45
- parsl/tests/integration/test_channels/test_ssh_1.py +0 -40
- parsl/tests/integration/test_channels/test_ssh_errors.py +0 -46
- parsl/tests/integration/test_channels/test_ssh_file_transport.py +0 -41
- parsl/tests/integration/test_channels/test_ssh_interactive.py +0 -24
- parsl/tests/manual_tests/test_ad_hoc_htex.py +0 -48
- parsl/tests/manual_tests/test_fan_in_out_htex_remote.py +0 -88
- parsl/tests/manual_tests/test_oauth_ssh.py +0 -13
- parsl/tests/sites/test_local_adhoc.py +0 -61
- parsl/tests/test_channels/__init__.py +0 -0
- parsl/tests/test_channels/test_large_output.py +0 -22
- parsl/tests/test_data/__init__.py +0 -0
- parsl/tests/test_mpi_apps/test_mpi_mode_disabled.py +0 -51
- parsl/tests/test_providers/test_cobalt_deprecation_warning.py +0 -16
- parsl-2024.3.18.dist-info/METADATA +0 -98
- parsl-2024.3.18.dist-info/RECORD +0 -449
- parsl/{channels/local → monitoring/radios}/__init__.py +0 -0
- parsl/{channels/oauth_ssh → tests/test_shutdown}/__init__.py +0 -0
- parsl/tests/{test_data → test_staging}/test_file_apps.py +0 -0
- parsl/tests/{test_data → test_staging}/test_file_staging.py +0 -0
- parsl/{channels/ssh → tests/unit}/__init__.py +0 -0
- {parsl-2024.3.18.data → parsl-2025.1.13.data}/scripts/parsl_coprocess.py +1 -1
- {parsl-2024.3.18.dist-info → parsl-2025.1.13.dist-info}/LICENSE +0 -0
- {parsl-2024.3.18.dist-info → parsl-2025.1.13.dist-info}/entry_points.txt +0 -0
- {parsl-2024.3.18.dist-info → parsl-2025.1.13.dist-info}/top_level.txt +0 -0
parsl/channels/ssh_il/ssh_il.py
DELETED
@@ -1,74 +0,0 @@
-import getpass
-import logging
-
-import paramiko
-from parsl.channels.ssh.ssh import SSHChannel
-
-logger = logging.getLogger(__name__)
-
-
-class SSHInteractiveLoginChannel(SSHChannel):
-    """SSH persistent channel. This enables remote execution on sites
-    accessible via ssh. This channel supports interactive login and is appropriate when
-    keys are not set up.
-    """
-
-    def __init__(self, hostname, username=None, password=None, script_dir=None, envs=None):
-        ''' Initialize a persistent connection to the remote system.
-        We should know at this point whether ssh connectivity is possible
-
-        Args:
-            - hostname (String) : Hostname
-
-        KWargs:
-            - username (string) : Username on remote system
-            - password (string) : Password for remote system
-            - script_dir (string) : Full path to a script dir where
-              generated scripts could be sent to.
-            - envs (dict) : A dictionary of env variables to be set when executing commands
-
-        Raises:
-        '''
-        self.hostname = hostname
-        self.username = username
-        self.password = password
-
-        self.ssh_client = paramiko.SSHClient()
-        self.ssh_client.load_system_host_keys()
-        self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-        self.script_dir = script_dir
-
-        self.envs = {}
-        if envs is not None:
-            self.envs = envs
-
-        try:
-            self.ssh_client.connect(
-                hostname, username=username, password=password, allow_agent=True
-            )
-
-        except Exception:
-            logger.debug("Caught the SSHException in SSHInteractive")
-            pass
-        '''
-        except paramiko.BadHostKeyException as e:
-            raise BadHostKeyException(e, self.hostname)
-
-        except paramiko.AuthenticationException as e:
-            raise AuthException(e, self.hostname)
-
-        except paramiko.SSHException as e:
-            logger.debug("Caught the SSHException in SSHInteractive")
-            pass
-
-        except Exception as e:
-            raise SSHException(e, self.hostname)
-        '''
-
-        transport = self.ssh_client.get_transport()
-
-        il_password = getpass.getpass('Enter {0} Logon password :'.format(hostname))
-        transport.auth_password(username, il_password)
-
-        self.sftp_client = paramiko.SFTPClient.from_transport(transport)
parsl/configs/ad_hoc.py
DELETED
@@ -1,35 +0,0 @@
-from parsl.providers import AdHocProvider
-from parsl.channels import SSHChannel
-from parsl.executors import HighThroughputExecutor
-from parsl.config import Config
-from typing import Any, Dict
-
-user_opts: Dict[str, Dict[str, Any]]
-user_opts = {'adhoc':
-             {'username': 'YOUR_USERNAME',
-              'script_dir': 'YOUR_SCRIPT_DIR',
-              'remote_hostnames': ['REMOTE_HOST_URL_1', 'REMOTE_HOST_URL_2']
-              }
-             }
-
-
-config = Config(
-    executors=[
-        HighThroughputExecutor(
-            label='remote_htex',
-            max_workers_per_node=2,
-            worker_logdir_root=user_opts['adhoc']['script_dir'],
-            provider=AdHocProvider(
-                # Command to be run before starting a worker, such as:
-                # 'module load Anaconda; source activate parsl_env'.
-                worker_init='',
-                channels=[SSHChannel(hostname=m,
-                                     username=user_opts['adhoc']['username'],
-                                     script_dir=user_opts['adhoc']['script_dir'],
-                                     ) for m in user_opts['adhoc']['remote_hostnames']]
-            )
-        )
-    ],
-    # AdHoc Clusters should not be setup with scaling strategy.
-    strategy='none',
-)
parsl/executors/radical/rpex_master.py
DELETED
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-
-import radical.utils as ru
-import radical.pilot as rp
-
-
-# ------------------------------------------------------------------------------
-#
-if __name__ == '__main__':
-
-    # The purpose of this master is to (a) spawn a set or workers
-    # within the same allocation, (b) to distribute work items to
-    # those workers, and (c) to collect the responses again.
-    cfg_fname = str(sys.argv[1])
-    cfg = ru.Config(cfg=ru.read_json(cfg_fname))
-    cfg.rank = int(sys.argv[2])
-
-    worker_descr = cfg.worker_descr
-    n_workers = cfg.n_workers
-    gpus_per_node = cfg.gpus_per_node
-    cores_per_node = cfg.cores_per_node
-    nodes_per_worker = cfg.nodes_per_worker
-
-    # create a master class instance - this will establish communication
-    # to the pilot agent
-    master = rp.raptor.Master(cfg)
-
-    # insert `n` worker into the agent. The agent will schedule (place)
-    # those workers and execute them.
-    worker_descr['ranks'] = nodes_per_worker * cores_per_node
-    worker_descr['gpus_per_rank'] = nodes_per_worker * gpus_per_node
-    worker_ids = master.submit_workers(
-        [rp.TaskDescription(worker_descr) for _ in range(n_workers)])
-
-    # wait for all workers
-    master.wait_workers()
-    master.start()
-    master.join()
-
-# ------------------------------------------------------------------------------
parsl/monitoring/radios.py
DELETED
@@ -1,175 +0,0 @@
-import os
-import socket
-import pickle
-import uuid
-import logging
-
-from abc import ABCMeta, abstractmethod
-
-from typing import Optional
-
-from parsl.serialize import serialize
-
-_db_manager_excepts: Optional[Exception]
-
-
-logger = logging.getLogger(__name__)
-
-
-class MonitoringRadio(metaclass=ABCMeta):
-    @abstractmethod
-    def send(self, message: object) -> None:
-        pass
-
-
-class FilesystemRadio(MonitoringRadio):
-    """A MonitoringRadio that sends messages over a shared filesystem.
-
-    The messsage directory structure is based on maildir,
-    https://en.wikipedia.org/wiki/Maildir
-
-    The writer creates a message in tmp/ and then when it is fully
-    written, moves it atomically into new/
-
-    The reader ignores tmp/ and only reads and deletes messages from
-    new/
-
-    This avoids a race condition of reading partially written messages.
-
-    This radio is likely to give higher shared filesystem load compared to
-    the UDPRadio, but should be much more reliable.
-    """
-
-    def __init__(self, *, monitoring_url: str, source_id: int, timeout: int = 10, run_dir: str):
-        logger.info("filesystem based monitoring channel initializing")
-        self.source_id = source_id
-        self.base_path = f"{run_dir}/monitor-fs-radio/"
-        self.tmp_path = f"{self.base_path}/tmp"
-        self.new_path = f"{self.base_path}/new"
-
-        os.makedirs(self.tmp_path, exist_ok=True)
-        os.makedirs(self.new_path, exist_ok=True)
-
-    def send(self, message: object) -> None:
-        logger.info("Sending a monitoring message via filesystem")
-
-        unique_id = str(uuid.uuid4())
-
-        tmp_filename = f"{self.tmp_path}/{unique_id}"
-        new_filename = f"{self.new_path}/{unique_id}"
-        buffer = (message, "NA")
-
-        # this will write the message out then atomically
-        # move it into new/, so that a partially written
-        # file will never be observed in new/
-        with open(tmp_filename, "wb") as f:
-            f.write(serialize(buffer))
-        os.rename(tmp_filename, new_filename)
-
-
-class HTEXRadio(MonitoringRadio):
-
-    def __init__(self, monitoring_url: str, source_id: int, timeout: int = 10):
-        """
-        Parameters
-        ----------
-
-        monitoring_url : str
-            URL of the form <scheme>://<IP>:<PORT>
-        source_id : str
-            String identifier of the source
-        timeout : int
-            timeout, default=10s
-        """
-        self.source_id = source_id
-        logger.info("htex-based monitoring channel initialising")
-
-    def send(self, message: object) -> None:
-        """ Sends a message to the UDP receiver
-
-        Parameter
-        ---------
-
-        message: object
-            Arbitrary pickle-able object that is to be sent
-
-        Returns:
-            None
-        """
-
-        import parsl.executors.high_throughput.monitoring_info
-
-        result_queue = parsl.executors.high_throughput.monitoring_info.result_queue
-
-        # this message needs to go in the result queue tagged so that it is treated
-        # i) as a monitoring message by the interchange, and then further more treated
-        # as a RESOURCE_INFO message when received by monitoring (rather than a NODE_INFO
-        # which is the implicit default for messages from the interchange)
-
-        # for the interchange, the outer wrapper, this needs to be a dict:
-
-        interchange_msg = {
-            'type': 'monitoring',
-            'payload': message
-        }
-
-        if result_queue:
-            result_queue.put(pickle.dumps(interchange_msg))
-        else:
-            logger.error("result_queue is uninitialized - cannot put monitoring message")
-
-        return
-
-
-class UDPRadio(MonitoringRadio):
-
-    def __init__(self, monitoring_url: str, source_id: int, timeout: int = 10):
-        """
-        Parameters
-        ----------
-
-        monitoring_url : str
-            URL of the form <scheme>://<IP>:<PORT>
-        source_id : str
-            String identifier of the source
-        timeout : int
-            timeout, default=10s
-        """
-        self.monitoring_url = monitoring_url
-        self.sock_timeout = timeout
-        self.source_id = source_id
-        try:
-            self.scheme, self.ip, port = (x.strip('/') for x in monitoring_url.split(':'))
-            self.port = int(port)
-        except Exception:
-            raise Exception("Failed to parse monitoring url: {}".format(monitoring_url))
-
-        self.sock = socket.socket(socket.AF_INET,
-                                  socket.SOCK_DGRAM,
-                                  socket.IPPROTO_UDP)  # UDP
-        self.sock.settimeout(self.sock_timeout)
-
-    def send(self, message: object) -> None:
-        """ Sends a message to the UDP receiver
-
-        Parameter
-        ---------
-
-        message: object
-            Arbitrary pickle-able object that is to be sent
-
-        Returns:
-            None
-        """
-        try:
-            buffer = pickle.dumps(message)
-        except Exception:
-            logging.exception("Exception during pickling", exc_info=True)
-            return
-
-        try:
-            self.sock.sendto(buffer, (self.ip, self.port))
-        except socket.timeout:
-            logging.error("Could not send message within timeout limit")
-            return
-        return
parsl/providers/ad_hoc/ad_hoc.py
DELETED
@@ -1,248 +0,0 @@
-import logging
-import os
-import time
-
-from parsl.channels import LocalChannel
-from parsl.jobs.states import JobStatus, JobState
-from parsl.launchers import SimpleLauncher
-from parsl.providers.base import ExecutionProvider
-from parsl.providers.errors import ScriptPathError
-from parsl.utils import RepresentationMixin
-
-logger = logging.getLogger(__name__)
-
-
-class AdHocProvider(ExecutionProvider, RepresentationMixin):
-    """ Ad-hoc execution provider
-
-    This provider is used to provision execution resources over one or more ad hoc nodes
-    that are each accessible over a Channel (say, ssh) but otherwise lack a cluster scheduler.
-
-    Parameters
-    ----------
-
-    channels : list of Channel ojects
-        Each channel represents a connection to a remote node
-
-    worker_init : str
-        Command to be run before starting a worker, such as 'module load Anaconda; source activate env'.
-        Since this provider calls the same worker_init across all nodes in the ad-hoc cluster, it is
-        recommended that a single script is made available across nodes such as ~/setup.sh that can
-        be invoked.
-
-    cmd_timeout : int
-        Duration for which the provider will wait for a command to be invoked on a remote system.
-        Defaults to 30s
-
-    parallelism : float
-        Determines the ratio of workers to tasks as managed by the strategy component
-
-    """
-
-    def __init__(self,
-                 channels=[],
-                 worker_init='',
-                 cmd_timeout=30,
-                 parallelism=1,
-                 move_files=None):
-
-        self.channels = channels
-        self._label = 'ad-hoc'
-        self.worker_init = worker_init
-        self.cmd_timeout = cmd_timeout
-        self.parallelism = 1
-        self.move_files = move_files
-        self.launcher = SimpleLauncher()
-        self.init_blocks = self.min_blocks = self.max_blocks = len(channels)
-
-        # This will be overridden by the DFK to the rundirs.
-        self.script_dir = "."
-
-        # In ad-hoc mode, nodes_per_block should be 1
-        self.nodes_per_block = 1
-
-        # Dictionary that keeps track of jobs, keyed on job_id
-        self.resources = {}
-
-        self.least_loaded = self._least_loaded()
-        logger.debug("AdHoc provider initialized")
-
-    def _write_submit_script(self, script_string, script_filename):
-        '''
-        Load the template string with config values and write the generated submit script to
-        a submit script file.
-
-        Parameters
-        ----------
-        script_string: (string)
-            The template string to be used for the writing submit script
-
-        script_filename: (string)
-            Name of the submit script
-
-        Returns
-        -------
-        None: on success
-
-        Raises
-        ------
-        ScriptPathError
-            Unable to write submit script out
-        '''
-
-        try:
-            with open(script_filename, 'w') as f:
-                f.write(script_string)
-
-        except IOError as e:
-            logger.error("Failed writing to submit script: %s", script_filename)
-            raise ScriptPathError(script_filename, e)
-
-        return None
-
-    def _least_loaded(self):
-        """ Find channels that are not in use
-
-        Returns
-        -------
-        channel : Channel object
-        None : When there are no more available channels
-        """
-        while True:
-            channel_counts = {channel: 0 for channel in self.channels}
-            for job_id in self.resources:
-                channel = self.resources[job_id]['channel']
-                if self.resources[job_id]['status'].state == JobState.RUNNING:
-                    channel_counts[channel] = channel_counts.get(channel, 0) + 1
-                else:
-                    channel_counts[channel] = channel_counts.get(channel, 0)
-
-            logger.debug("Channel_counts : {}".format(channel_counts))
-            if 0 not in channel_counts.values():
-                yield None
-
-            for channel in channel_counts:
-                if channel_counts[channel] == 0:
-                    yield channel
-
-    def submit(self, command, tasks_per_node, job_name="parsl.adhoc"):
-        ''' Submits the command onto a channel from the list of channels
-
-        Submit returns an ID that corresponds to the task that was just submitted.
-
-        Parameters
-        ----------
-        command: (String)
-            Commandline invocation to be made on the remote side.
-
-        tasks_per_node: (int)
-            command invocations to be launched per node
-
-        job_name: (String)
-            Name of the job. Default : parsl.adhoc
-
-
-        Returns
-        -------
-        None
-            At capacity, cannot provision more
-
-        job_id: (string)
-            Identifier for the job
-
-        '''
-        channel = next(self.least_loaded)
-        if channel is None:
-            logger.warning("All Channels in Ad-Hoc provider are in use")
-            return None
-
-        job_name = "{0}.{1}".format(job_name, time.time())
-
-        # Set script path
-        script_path = "{0}/{1}.sh".format(self.script_dir, job_name)
-        script_path = os.path.abspath(script_path)
-
-        wrap_command = self.worker_init + '\n' + self.launcher(command, tasks_per_node, self.nodes_per_block)
-
-        self._write_submit_script(wrap_command, script_path)
-
-        job_id = None
-        remote_pid = None
-        final_cmd = None
-
-        if (self.move_files is None and not isinstance(channel, LocalChannel)) or (self.move_files):
-            logger.debug("Pushing start script")
-            script_path = channel.push_file(script_path, channel.script_dir)
-
-        # Bash would return until the streams are closed. So we redirect to a outs file
-        final_cmd = 'bash {0} > {0}.out 2>&1 & \n echo "PID:$!" '.format(script_path)
-        retcode, stdout, stderr = channel.execute_wait(final_cmd, self.cmd_timeout)
-        for line in stdout.split('\n'):
-            if line.startswith("PID:"):
-                remote_pid = line.split("PID:")[1].strip()
-                job_id = remote_pid
-        if job_id is None:
-            logger.warning("Channel failed to start remote command/retrieve PID")
-
-        self.resources[job_id] = {'job_id': job_id,
-                                  'status': JobStatus(JobState.RUNNING),
-                                  'cmd': final_cmd,
-                                  'channel': channel,
-                                  'remote_pid': remote_pid}
-
-        return job_id
-
-    def status(self, job_ids):
-        """ Get status of the list of jobs with job_ids
-
-        Parameters
-        ----------
-        job_ids : list of strings
-            List of job id strings
-
-        Returns
-        -------
-        list of JobStatus objects
-        """
-        for job_id in job_ids:
-            channel = self.resources[job_id]['channel']
-            status_command = "ps --pid {} | grep {}".format(self.resources[job_id]['job_id'],
-                                                            self.resources[job_id]['cmd'].split()[0])
-            retcode, stdout, stderr = channel.execute_wait(status_command)
-            if retcode != 0 and self.resources[job_id]['status'].state == JobState.RUNNING:
-                self.resources[job_id]['status'] = JobStatus(JobState.FAILED)
-
-        return [self.resources[job_id]['status'] for job_id in job_ids]
-
-    def cancel(self, job_ids):
-        """ Cancel a list of jobs with job_ids
-
-        Parameters
-        ----------
-        job_ids : list of strings
-            List of job id strings
-
-        Returns
-        -------
-        list of confirmation bools: [True, False...]
-        """
-        logger.debug("Cancelling jobs: {}".format(job_ids))
-        rets = []
-        for job_id in job_ids:
-            channel = self.resources[job_id]['channel']
-            cmd = "kill -TERM -$(ps -o pgid= {} | grep -o '[0-9]*')".format(self.resources[job_id]['job_id'])
-            retcode, stdout, stderr = channel.execute_wait(cmd)
-            if retcode == 0:
-                rets.append(True)
-            else:
-                rets.append(False)
-            self.resources[job_id]['status'] = JobStatus(JobState.COMPLETED)
-        return rets
-
-    @property
-    def label(self):
-        return self._label
-
-    @property
-    def status_polling_interval(self):
-        return 10