parsl 2024.10.28__py3-none-any.whl → 2024.11.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. parsl/channels/base.py +6 -46
  2. parsl/channels/errors.py +0 -67
  3. parsl/channels/local/local.py +5 -56
  4. parsl/dataflow/dflow.py +6 -61
  5. parsl/executors/high_throughput/executor.py +0 -1
  6. parsl/executors/high_throughput/mpi_resource_management.py +0 -12
  7. parsl/executors/taskvine/manager.py +6 -0
  8. parsl/executors/taskvine/manager_config.py +5 -0
  9. parsl/monitoring/monitoring.py +23 -26
  10. parsl/monitoring/radios.py +4 -17
  11. parsl/monitoring/remote.py +3 -5
  12. parsl/providers/__init__.py +0 -2
  13. parsl/providers/base.py +1 -1
  14. parsl/providers/cluster_provider.py +1 -4
  15. parsl/providers/condor/condor.py +1 -4
  16. parsl/providers/grid_engine/grid_engine.py +1 -4
  17. parsl/providers/lsf/lsf.py +1 -4
  18. parsl/providers/pbspro/pbspro.py +1 -4
  19. parsl/providers/slurm/slurm.py +1 -4
  20. parsl/providers/torque/torque.py +1 -4
  21. parsl/tests/configs/user_opts.py +0 -7
  22. parsl/tests/conftest.py +4 -4
  23. parsl/tests/site_tests/site_config_selector.py +1 -6
  24. parsl/tests/test_bash_apps/test_basic.py +3 -0
  25. parsl/tests/test_bash_apps/test_error_codes.py +4 -0
  26. parsl/tests/test_bash_apps/test_kwarg_storage.py +1 -0
  27. parsl/tests/test_bash_apps/test_memoize.py +2 -6
  28. parsl/tests/test_bash_apps/test_memoize_ignore_args.py +3 -0
  29. parsl/tests/test_bash_apps/test_memoize_ignore_args_regr.py +1 -0
  30. parsl/tests/test_bash_apps/test_multiline.py +1 -0
  31. parsl/tests/test_bash_apps/test_stdout.py +2 -0
  32. parsl/tests/test_channels/test_local_channel.py +0 -19
  33. parsl/tests/test_docs/test_from_slides.py +3 -0
  34. parsl/tests/test_docs/test_kwargs.py +3 -0
  35. parsl/tests/test_monitoring/test_basic.py +13 -1
  36. parsl/tests/test_providers/test_local_provider.py +0 -135
  37. parsl/tests/test_providers/test_pbspro_template.py +2 -1
  38. parsl/tests/test_providers/test_slurm_template.py +2 -1
  39. parsl/tests/test_python_apps/test_outputs.py +1 -0
  40. parsl/tests/test_regression/test_226.py +1 -0
  41. parsl/tests/test_staging/test_docs_1.py +1 -0
  42. parsl/tests/test_staging/test_output_chain_filenames.py +3 -0
  43. parsl/tests/test_staging/test_staging_ftp.py +1 -0
  44. parsl/tests/test_staging/test_staging_https.py +3 -0
  45. parsl/tests/test_staging/test_staging_stdout.py +2 -0
  46. parsl/version.py +1 -1
  47. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/METADATA +2 -8
  48. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/RECORD +56 -74
  49. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/WHEEL +1 -1
  50. parsl/channels/oauth_ssh/__init__.py +0 -0
  51. parsl/channels/oauth_ssh/oauth_ssh.py +0 -119
  52. parsl/channels/ssh/__init__.py +0 -0
  53. parsl/channels/ssh/ssh.py +0 -295
  54. parsl/channels/ssh_il/__init__.py +0 -0
  55. parsl/channels/ssh_il/ssh_il.py +0 -85
  56. parsl/providers/ad_hoc/__init__.py +0 -0
  57. parsl/providers/ad_hoc/ad_hoc.py +0 -252
  58. parsl/providers/cobalt/__init__.py +0 -0
  59. parsl/providers/cobalt/cobalt.py +0 -236
  60. parsl/providers/cobalt/template.py +0 -17
  61. parsl/tests/configs/cooley_htex.py +0 -37
  62. parsl/tests/configs/local_adhoc.py +0 -18
  63. parsl/tests/configs/theta.py +0 -37
  64. parsl/tests/manual_tests/test_fan_in_out_htex_remote.py +0 -88
  65. parsl/tests/sites/test_local_adhoc.py +0 -62
  66. parsl/tests/test_channels/test_dfk_close.py +0 -26
  67. parsl/tests/test_providers/test_cobalt_deprecation_warning.py +0 -18
  68. {parsl-2024.10.28.data → parsl-2024.11.11.data}/scripts/exec_parsl_function.py +0 -0
  69. {parsl-2024.10.28.data → parsl-2024.11.11.data}/scripts/interchange.py +0 -0
  70. {parsl-2024.10.28.data → parsl-2024.11.11.data}/scripts/parsl_coprocess.py +0 -0
  71. {parsl-2024.10.28.data → parsl-2024.11.11.data}/scripts/process_worker_pool.py +0 -0
  72. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/LICENSE +0 -0
  73. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/entry_points.txt +0 -0
  74. {parsl-2024.10.28.dist-info → parsl-2024.11.11.dist-info}/top_level.txt +0 -0
@@ -1,236 +0,0 @@
1
- import logging
2
- import os
3
- import time
4
- import warnings
5
-
6
- from parsl.channels import LocalChannel
7
- from parsl.jobs.states import JobState, JobStatus
8
- from parsl.launchers import AprunLauncher
9
- from parsl.providers.cluster_provider import ClusterProvider
10
- from parsl.providers.cobalt.template import template_string
11
- from parsl.providers.errors import ScaleOutFailed
12
- from parsl.utils import RepresentationMixin, wtime_to_minutes
13
-
14
- logger = logging.getLogger(__name__)
15
-
16
- translate_table = {
17
- 'QUEUED': JobState.PENDING,
18
- 'STARTING': JobState.PENDING,
19
- 'RUNNING': JobState.RUNNING,
20
- 'EXITING': JobState.COMPLETED,
21
- 'KILLING': JobState.COMPLETED
22
- }
23
-
24
-
25
- class CobaltProvider(ClusterProvider, RepresentationMixin):
26
- """ Cobalt Execution Provider
27
-
28
- WARNING: CobaltProvider is deprecated and will be removed by 2024.04
29
-
30
- This provider uses cobalt to submit (qsub), obtain the status of (qstat), and cancel (qdel)
31
- jobs. Theo script to be used is created from a template file in this
32
- same module.
33
-
34
- Parameters
35
- ----------
36
- channel : Channel
37
- Channel for accessing this provider. Possible channels include
38
- :class:`~parsl.channels.LocalChannel` (the default),
39
- :class:`~parsl.channels.SSHChannel`, or
40
- :class:`~parsl.channels.SSHInteractiveLoginChannel`.
41
- nodes_per_block : int
42
- Nodes to provision per block.
43
- min_blocks : int
44
- Minimum number of blocks to maintain.
45
- max_blocks : int
46
- Maximum number of blocks to maintain.
47
- walltime : str
48
- Walltime requested per block in HH:MM:SS.
49
- account : str
50
- Account that the job will be charged against.
51
- queue : str
52
- Torque queue to request blocks from.
53
- scheduler_options : str
54
- String to prepend to the submit script to the scheduler.
55
- worker_init : str
56
- Command to be run before starting a worker, such as 'module load Anaconda; source activate env'.
57
- launcher : Launcher
58
- Launcher for this provider. Possible launchers include
59
- :class:`~parsl.launchers.AprunLauncher` (the default) or,
60
- :class:`~parsl.launchers.SingleNodeLauncher`
61
- """
62
- def __init__(self,
63
- channel=LocalChannel(),
64
- nodes_per_block=1,
65
- init_blocks=0,
66
- min_blocks=0,
67
- max_blocks=1,
68
- parallelism=1,
69
- walltime="00:10:00",
70
- account=None,
71
- queue=None,
72
- scheduler_options='',
73
- worker_init='',
74
- launcher=AprunLauncher(),
75
- cmd_timeout=10):
76
- label = 'cobalt'
77
- super().__init__(label,
78
- channel=channel,
79
- nodes_per_block=nodes_per_block,
80
- init_blocks=init_blocks,
81
- min_blocks=min_blocks,
82
- max_blocks=max_blocks,
83
- parallelism=parallelism,
84
- walltime=walltime,
85
- launcher=launcher,
86
- cmd_timeout=cmd_timeout)
87
-
88
- self.account = account
89
- self.queue = queue
90
- self.scheduler_options = scheduler_options
91
- self.worker_init = worker_init
92
- warnings.warn("CobaltProvider is deprecated; This will be removed after 2024-04",
93
- DeprecationWarning,
94
- stacklevel=2)
95
-
96
- def _status(self):
97
- """Returns the status list for a list of job_ids
98
-
99
- Args:
100
- self
101
-
102
- Returns:
103
- [status...] : Status list of all jobs
104
- """
105
-
106
- jobs_missing = list(self.resources.keys())
107
-
108
- retcode, stdout, stderr = self.execute_wait("qstat -u $USER")
109
-
110
- # Execute_wait failed. Do no update.
111
- if retcode != 0:
112
- return
113
-
114
- for line in stdout.split('\n'):
115
- if line.startswith('='):
116
- continue
117
-
118
- parts = line.upper().split()
119
- if parts and parts[0] != 'JOBID':
120
- job_id = parts[0]
121
-
122
- if job_id not in self.resources:
123
- continue
124
-
125
- status = translate_table.get(parts[4], JobState.UNKNOWN)
126
-
127
- self.resources[job_id]['status'] = JobStatus(status)
128
- jobs_missing.remove(job_id)
129
-
130
- # squeue does not report on jobs that are not running. So we are filling in the
131
- # blanks for missing jobs, we might lose some information about why the jobs failed.
132
- for missing_job in jobs_missing:
133
- self.resources[missing_job]['status'] = JobStatus(JobState.COMPLETED)
134
-
135
- def submit(self, command, tasks_per_node, job_name="parsl.cobalt"):
136
- """ Submits the command onto an Local Resource Manager job of parallel elements.
137
- Submit returns an ID that corresponds to the task that was just submitted.
138
-
139
- If tasks_per_node < 1 : ! This is illegal. tasks_per_node should be integer
140
-
141
- If tasks_per_node == 1:
142
- A single node is provisioned
143
-
144
- If tasks_per_node > 1 :
145
- tasks_per_node number of nodes are provisioned.
146
-
147
- Args:
148
- - command :(String) Commandline invocation to be made on the remote side.
149
- - tasks_per_node (int) : command invocations to be launched per node
150
-
151
- Kwargs:
152
- - job_name (String): Name for job, must be unique
153
-
154
- Returns:
155
- - None: At capacity, cannot provision more
156
- - job_id: (string) Identifier for the job
157
-
158
- """
159
-
160
- account_opt = '-A {}'.format(self.account) if self.account is not None else ''
161
-
162
- job_name = "parsl.{0}.{1}".format(job_name, time.time())
163
-
164
- script_path = "{0}/{1}.submit".format(self.script_dir, job_name)
165
- script_path = os.path.abspath(script_path)
166
-
167
- job_config = {}
168
- job_config["scheduler_options"] = self.scheduler_options
169
- job_config["worker_init"] = self.worker_init
170
-
171
- logger.debug("Requesting nodes_per_block:%s tasks_per_node:%s",
172
- self.nodes_per_block, tasks_per_node)
173
-
174
- # Wrap the command
175
- job_config["user_script"] = self.launcher(command, tasks_per_node, self.nodes_per_block)
176
-
177
- queue_opt = '-q {}'.format(self.queue) if self.queue is not None else ''
178
-
179
- logger.debug("Writing submit script")
180
- self._write_submit_script(template_string, script_path, job_name, job_config)
181
-
182
- channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)
183
-
184
- command = 'qsub -n {0} {1} -t {2} {3} {4}'.format(
185
- self.nodes_per_block, queue_opt, wtime_to_minutes(self.walltime), account_opt, channel_script_path)
186
- logger.debug("Executing {}".format(command))
187
-
188
- retcode, stdout, stderr = self.execute_wait(command)
189
-
190
- # TODO : FIX this block
191
- if retcode != 0:
192
- logger.error("Failed command: {0}".format(command))
193
- logger.error("Launch failed stdout:\n{0} \nstderr:{1}\n".format(stdout, stderr))
194
-
195
- logger.debug("Retcode:%s STDOUT:%s STDERR:%s", retcode, stdout.strip(), stderr.strip())
196
-
197
- job_id = None
198
-
199
- if retcode == 0:
200
- # We should be getting only one line back
201
- job_id = stdout.strip()
202
- self.resources[job_id] = {'job_id': job_id, 'status': JobStatus(JobState.PENDING)}
203
- else:
204
- logger.error("Submit command failed: {0}".format(stderr))
205
- raise ScaleOutFailed(self.__class__, "Request to submit job to local scheduler failed")
206
-
207
- logger.debug("Returning job id : {0}".format(job_id))
208
- return job_id
209
-
210
- def cancel(self, job_ids):
211
- """ Cancels the jobs specified by a list of job ids
212
-
213
- Args:
214
- job_ids : [<job_id> ...]
215
-
216
- Returns :
217
- [True/False...] : If the cancel operation fails the entire list will be False.
218
- """
219
-
220
- job_id_list = ' '.join(job_ids)
221
- retcode, stdout, stderr = self.execute_wait("qdel {0}".format(job_id_list))
222
- rets = None
223
- if retcode == 0:
224
- for jid in job_ids:
225
- # ???
226
- # self.resources[jid]['status'] = translate_table['KILLING'] # Setting state to cancelled
227
- self.resources[jid]['status'] = JobStatus(JobState.COMPLETED)
228
- rets = [True for i in job_ids]
229
- else:
230
- rets = [False for i in job_ids]
231
-
232
- return rets
233
-
234
- @property
235
- def status_polling_interval(self):
236
- return 60
@@ -1,17 +0,0 @@
1
- template_string = '''#!/bin/bash -el
2
- ${scheduler_options}
3
-
4
- ${worker_init}
5
-
6
- echo "Starting Cobalt job script"
7
-
8
- echo "----Cobalt Nodefile: -----"
9
- cat $$COBALT_NODEFILE
10
- echo "--------------------------"
11
-
12
- export JOBNAME="${jobname}"
13
-
14
- $user_script
15
-
16
- echo "End of Cobalt job"
17
- '''
@@ -1,37 +0,0 @@
1
- # UNTESTED
2
-
3
- from parsl.config import Config
4
- from parsl.executors import HighThroughputExecutor
5
- from parsl.launchers import MpiRunLauncher
6
- from parsl.providers import CobaltProvider
7
-
8
- # If you are a developer running tests, make sure to update parsl/tests/configs/user_opts.py
9
- # If you are a user copying-and-pasting this as an example, make sure to either
10
- # 1) create a local `user_opts.py`, or
11
- # 2) delete the user_opts import below and replace all appearances of `user_opts` with the literal value
12
- # (i.e., user_opts['swan']['username'] -> 'your_username')
13
- from .user_opts import user_opts
14
-
15
- config = Config(
16
- executors=[
17
- HighThroughputExecutor(
18
- label="cooley_htex",
19
- worker_debug=False,
20
- cores_per_worker=1,
21
- encrypted=True,
22
- provider=CobaltProvider(
23
- queue='debug',
24
- account=user_opts['cooley']['account'],
25
- launcher=MpiRunLauncher(), # UNTESTED COMPONENT
26
- scheduler_options=user_opts['cooley']['scheduler_options'],
27
- worker_init=user_opts['cooley']['worker_init'],
28
- init_blocks=1,
29
- max_blocks=1,
30
- min_blocks=1,
31
- nodes_per_block=4,
32
- cmd_timeout=60,
33
- walltime='00:10:00',
34
- ),
35
- )
36
- ]
37
- )
@@ -1,18 +0,0 @@
1
- from parsl.channels import LocalChannel
2
- from parsl.config import Config
3
- from parsl.executors import HighThroughputExecutor
4
- from parsl.providers.ad_hoc.ad_hoc import DeprecatedAdHocProvider
5
-
6
-
7
- def fresh_config():
8
- return Config(
9
- executors=[
10
- HighThroughputExecutor(
11
- label='AdHoc',
12
- encrypted=True,
13
- provider=DeprecatedAdHocProvider(
14
- channels=[LocalChannel(), LocalChannel()]
15
- )
16
- )
17
- ]
18
- )
@@ -1,37 +0,0 @@
1
- from parsl.config import Config
2
- from parsl.executors import HighThroughputExecutor
3
- from parsl.launchers import AprunLauncher
4
- from parsl.providers import CobaltProvider
5
-
6
- from .user_opts import user_opts
7
-
8
-
9
- def fresh_config():
10
- return Config(
11
- executors=[
12
- HighThroughputExecutor(
13
- label='theta_local_htex_multinode',
14
- max_workers_per_node=1,
15
- encrypted=True,
16
- provider=CobaltProvider(
17
- queue=user_opts['theta']['queue'],
18
- account=user_opts['theta']['account'],
19
- launcher=AprunLauncher(overrides="-d 64"),
20
- walltime='00:10:00',
21
- nodes_per_block=2,
22
- init_blocks=1,
23
- max_blocks=1,
24
- # string to prepend to #COBALT blocks in the submit
25
- # script to the scheduler eg: '#COBALT -t 50'
26
- scheduler_options='',
27
- # Command to be run before starting a worker, such as:
28
- # 'module load Anaconda; source activate parsl_env'.
29
- worker_init=user_opts['theta']['worker_init'],
30
- cmd_timeout=120,
31
- ),
32
- )
33
- ],
34
- )
35
-
36
-
37
- config = fresh_config()
@@ -1,88 +0,0 @@
1
- import logging
2
-
3
- import parsl
4
- from parsl.app.app import python_app
5
- from parsl.config import Config
6
- from parsl.executors import HighThroughputExecutor
7
- from parsl.launchers import AprunLauncher
8
- from parsl.monitoring.monitoring import MonitoringHub
9
- from parsl.providers import CobaltProvider
10
-
11
-
12
- def local_setup():
13
- threads_config = Config(
14
- executors=[
15
- HighThroughputExecutor(
16
- label="theta_htex",
17
- # worker_debug=True,
18
- cores_per_worker=4,
19
- encrypted=True,
20
- provider=CobaltProvider(
21
- queue='debug-flat-quad',
22
- account='CSC249ADCD01',
23
- launcher=AprunLauncher(overrides="-d 64"),
24
- worker_init='source activate parsl-issues',
25
- init_blocks=1,
26
- max_blocks=1,
27
- min_blocks=1,
28
- nodes_per_block=4,
29
- cmd_timeout=60,
30
- walltime='00:10:00',
31
- ),
32
- )
33
- ],
34
- monitoring=MonitoringHub(
35
- hub_port=55055,
36
- logging_level=logging.DEBUG,
37
- resource_monitoring_interval=10),
38
- strategy='none')
39
- parsl.load(threads_config)
40
-
41
-
42
- def local_teardown():
43
- parsl.clear()
44
-
45
-
46
- @python_app
47
- def inc(x):
48
- import time
49
- start = time.time()
50
- sleep_duration = 30.0
51
- while True:
52
- x += 1
53
- end = time.time()
54
- if end - start >= sleep_duration:
55
- break
56
- return x
57
-
58
-
59
- @python_app
60
- def add_inc(inputs=[]):
61
- import time
62
- start = time.time()
63
- sleep_duration = 30.0
64
- res = sum(inputs)
65
- while True:
66
- res += 1
67
- end = time.time()
68
- if end - start >= sleep_duration:
69
- break
70
- return res
71
-
72
-
73
- if __name__ == "__main__":
74
-
75
- total = 200
76
- half = int(total / 2)
77
- one_third = int(total / 3)
78
- two_third = int(total / 3 * 2)
79
- futures_1 = [inc(i) for i in range(total)]
80
- futures_2 = [add_inc(inputs=futures_1[0:half]),
81
- add_inc(inputs=futures_1[half:total])]
82
- futures_3 = [inc(futures_2[0]) for _ in range(half)] + [inc(futures_2[1]) for _ in range(half)]
83
- futures_4 = [add_inc(inputs=futures_3[0:one_third]),
84
- add_inc(inputs=futures_3[one_third:two_third]),
85
- add_inc(inputs=futures_3[two_third:total])]
86
-
87
- print([f.result() for f in futures_4])
88
- print("Done")
@@ -1,62 +0,0 @@
1
- import logging
2
-
3
- import pytest
4
-
5
- from parsl import python_app
6
- from parsl.tests.configs.local_adhoc import fresh_config as local_config
7
-
8
- logger = logging.getLogger(__name__)
9
-
10
-
11
- @python_app
12
- def python_app_2():
13
- import os
14
- import threading
15
- import time
16
- time.sleep(1)
17
- return "Hello from PID[{}] TID[{}]".format(os.getpid(), threading.current_thread())
18
-
19
-
20
- @python_app
21
- def python_app_1():
22
- import os
23
- import threading
24
- import time
25
- time.sleep(1)
26
- return "Hello from PID[{}] TID[{}]".format(os.getpid(), threading.current_thread())
27
-
28
-
29
- @python_app
30
- def bash_app(stdout=None, stderr=None):
31
- return 'echo "Hello from $(uname -a)" ; sleep 2'
32
-
33
-
34
- @pytest.mark.local
35
- def test_python(N=2):
36
- """Testing basic python functionality."""
37
-
38
- r1 = {}
39
- r2 = {}
40
- for i in range(0, N):
41
- r1[i] = python_app_1()
42
- r2[i] = python_app_2()
43
- print("Waiting ....")
44
-
45
- for x in r1:
46
- print("python_app_1 : ", r1[x].result())
47
- for x in r2:
48
- print("python_app_2 : ", r2[x].result())
49
-
50
- return
51
-
52
-
53
- @pytest.mark.local
54
- def test_bash():
55
- """Testing basic bash functionality."""
56
-
57
- import os
58
- fname = os.path.basename(__file__)
59
-
60
- x = bash_app(stdout="{0}.out".format(fname))
61
- print("Waiting ....")
62
- print(x.result())
@@ -1,26 +0,0 @@
1
- from unittest.mock import Mock
2
-
3
- import pytest
4
-
5
- import parsl
6
- from parsl.channels.base import Channel
7
- from parsl.executors import HighThroughputExecutor
8
- from parsl.providers import LocalProvider
9
-
10
-
11
- @pytest.mark.local
12
- def test_dfk_close():
13
-
14
- mock_channel = Mock(spec=Channel)
15
-
16
- # block settings all 0 because the mock channel won't be able to
17
- # do anything to make a block exist
18
- p = LocalProvider(channel=mock_channel, init_blocks=0, min_blocks=0, max_blocks=0)
19
-
20
- e = HighThroughputExecutor(provider=p)
21
-
22
- c = parsl.Config(executors=[e])
23
- with parsl.load(c):
24
- pass
25
-
26
- assert mock_channel.close.called
@@ -1,18 +0,0 @@
1
- import warnings
2
-
3
- import pytest
4
-
5
- from parsl.providers import CobaltProvider
6
-
7
-
8
- @pytest.mark.local
9
- def test_deprecation_warning():
10
-
11
- with warnings.catch_warnings(record=True) as w:
12
- warnings.simplefilter("always")
13
-
14
- CobaltProvider()
15
-
16
- assert len(w) == 1
17
- assert issubclass(w[-1].category, DeprecationWarning)
18
- assert "CobaltProvider" in str(w[-1].message)