parsl 2024.11.4__py3-none-any.whl → 2024.11.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. parsl/channels/base.py +6 -46
  2. parsl/channels/errors.py +0 -67
  3. parsl/channels/local/local.py +5 -56
  4. parsl/dataflow/dflow.py +1 -58
  5. parsl/executors/taskvine/manager.py +6 -0
  6. parsl/executors/taskvine/manager_config.py +5 -0
  7. parsl/monitoring/db_manager.py +6 -6
  8. parsl/monitoring/monitoring.py +27 -33
  9. parsl/monitoring/radios.py +1 -3
  10. parsl/monitoring/router.py +11 -11
  11. parsl/providers/cluster_provider.py +1 -4
  12. parsl/providers/condor/condor.py +1 -4
  13. parsl/providers/grid_engine/grid_engine.py +1 -4
  14. parsl/providers/lsf/lsf.py +1 -4
  15. parsl/providers/pbspro/pbspro.py +1 -4
  16. parsl/providers/slurm/slurm.py +26 -7
  17. parsl/providers/torque/torque.py +1 -4
  18. parsl/tests/configs/user_opts.py +0 -7
  19. parsl/tests/conftest.py +0 -4
  20. parsl/tests/test_channels/test_local_channel.py +0 -19
  21. parsl/tests/test_providers/test_local_provider.py +0 -135
  22. parsl/tests/test_providers/test_pbspro_template.py +2 -1
  23. parsl/tests/test_providers/test_slurm_template.py +2 -1
  24. parsl/version.py +1 -1
  25. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/METADATA +2 -8
  26. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/RECORD +34 -45
  27. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/WHEEL +1 -1
  28. parsl/channels/oauth_ssh/__init__.py +0 -0
  29. parsl/channels/oauth_ssh/oauth_ssh.py +0 -119
  30. parsl/channels/ssh/__init__.py +0 -0
  31. parsl/channels/ssh/ssh.py +0 -295
  32. parsl/channels/ssh_il/__init__.py +0 -0
  33. parsl/channels/ssh_il/ssh_il.py +0 -85
  34. parsl/providers/ad_hoc/__init__.py +0 -0
  35. parsl/providers/ad_hoc/ad_hoc.py +0 -252
  36. parsl/tests/configs/local_adhoc.py +0 -18
  37. parsl/tests/sites/test_local_adhoc.py +0 -62
  38. parsl/tests/test_channels/test_dfk_close.py +0 -26
  39. {parsl-2024.11.4.data → parsl-2024.11.18.data}/scripts/exec_parsl_function.py +0 -0
  40. {parsl-2024.11.4.data → parsl-2024.11.18.data}/scripts/interchange.py +0 -0
  41. {parsl-2024.11.4.data → parsl-2024.11.18.data}/scripts/parsl_coprocess.py +0 -0
  42. {parsl-2024.11.4.data → parsl-2024.11.18.data}/scripts/process_worker_pool.py +0 -0
  43. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/LICENSE +0 -0
  44. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/entry_points.txt +0 -0
  45. {parsl-2024.11.4.dist-info → parsl-2024.11.18.dist-info}/top_level.txt +0 -0
@@ -1,85 +0,0 @@
1
- import getpass
2
- import logging
3
-
4
- from parsl.channels.ssh.ssh import DeprecatedSSHChannel
5
- from parsl.errors import OptionalModuleMissing
6
-
7
- try:
8
- import paramiko
9
- _ssh_enabled = True
10
- except (ImportError, NameError, FileNotFoundError):
11
- _ssh_enabled = False
12
-
13
-
14
- logger = logging.getLogger(__name__)
15
-
16
-
17
- class DeprecatedSSHInteractiveLoginChannel(DeprecatedSSHChannel):
18
- """SSH persistent channel. This enables remote execution on sites
19
- accessible via ssh. This channel supports interactive login and is appropriate when
20
- keys are not set up.
21
- """
22
-
23
- def __init__(self, hostname, username=None, password=None, script_dir=None, envs=None):
24
- ''' Initialize a persistent connection to the remote system.
25
- We should know at this point whether ssh connectivity is possible
26
-
27
- Args:
28
- - hostname (String) : Hostname
29
-
30
- KWargs:
31
- - username (string) : Username on remote system
32
- - password (string) : Password for remote system
33
- - script_dir (string) : Full path to a script dir where
34
- generated scripts could be sent to.
35
- - envs (dict) : A dictionary of env variables to be set when executing commands
36
-
37
- Raises:
38
- '''
39
- if not _ssh_enabled:
40
- raise OptionalModuleMissing(['ssh'],
41
- "SSHInteractiveLoginChannel requires the ssh module and config.")
42
-
43
- self.hostname = hostname
44
- self.username = username
45
- self.password = password
46
-
47
- self.ssh_client = paramiko.SSHClient()
48
- self.ssh_client.load_system_host_keys()
49
- self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
50
-
51
- self.script_dir = script_dir
52
-
53
- self.envs = {}
54
- if envs is not None:
55
- self.envs = envs
56
-
57
- try:
58
- self.ssh_client.connect(
59
- hostname, username=username, password=password, allow_agent=True
60
- )
61
-
62
- except Exception:
63
- logger.debug("Caught the SSHException in SSHInteractive")
64
- pass
65
- '''
66
- except paramiko.BadHostKeyException as e:
67
- raise BadHostKeyException(e, self.hostname)
68
-
69
- except paramiko.AuthenticationException as e:
70
- raise AuthException(e, self.hostname)
71
-
72
- except paramiko.SSHException as e:
73
- logger.debug("Caught the SSHException in SSHInteractive")
74
- pass
75
-
76
- except Exception as e:
77
- raise SSHException(e, self.hostname)
78
- '''
79
-
80
- transport = self.ssh_client.get_transport()
81
-
82
- il_password = getpass.getpass('Enter {0} Logon password :'.format(hostname))
83
- transport.auth_password(username, il_password)
84
-
85
- self.sftp_client = paramiko.SFTPClient.from_transport(transport)
File without changes
@@ -1,252 +0,0 @@
1
- import logging
2
- import os
3
- import time
4
-
5
- from parsl.channels import LocalChannel
6
- from parsl.jobs.states import JobState, JobStatus
7
- from parsl.launchers import SimpleLauncher
8
- from parsl.providers.base import ExecutionProvider
9
- from parsl.providers.errors import ScriptPathError
10
- from parsl.utils import RepresentationMixin
11
-
12
- logger = logging.getLogger(__name__)
13
-
14
-
15
- class DeprecatedAdHocProvider(ExecutionProvider, RepresentationMixin):
16
- """ Deprecated ad-hoc execution provider
17
-
18
- The (former) AdHocProvider is deprecated. See
19
- `issue #3515 <https://github.com/Parsl/parsl/issues/3515>`_
20
- for further discussion.
21
-
22
- This provider is used to provision execution resources over one or more ad hoc nodes
23
- that are each accessible over a Channel (say, ssh) but otherwise lack a cluster scheduler.
24
-
25
- Parameters
26
- ----------
27
-
28
- channels : list of Channel ojects
29
- Each channel represents a connection to a remote node
30
-
31
- worker_init : str
32
- Command to be run before starting a worker, such as 'module load Anaconda; source activate env'.
33
- Since this provider calls the same worker_init across all nodes in the ad-hoc cluster, it is
34
- recommended that a single script is made available across nodes such as ~/setup.sh that can
35
- be invoked.
36
-
37
- cmd_timeout : int
38
- Duration for which the provider will wait for a command to be invoked on a remote system.
39
- Defaults to 30s
40
-
41
- parallelism : float
42
- Determines the ratio of workers to tasks as managed by the strategy component
43
-
44
- """
45
-
46
- def __init__(self,
47
- channels=[],
48
- worker_init='',
49
- cmd_timeout=30,
50
- parallelism=1,
51
- move_files=None):
52
-
53
- self.channels = channels
54
- self._label = 'ad-hoc'
55
- self.worker_init = worker_init
56
- self.cmd_timeout = cmd_timeout
57
- self.parallelism = 1
58
- self.move_files = move_files
59
- self.launcher = SimpleLauncher()
60
- self.init_blocks = self.min_blocks = self.max_blocks = len(channels)
61
-
62
- # This will be overridden by the DFK to the rundirs.
63
- self.script_dir = "."
64
-
65
- # In ad-hoc mode, nodes_per_block should be 1
66
- self.nodes_per_block = 1
67
-
68
- # Dictionary that keeps track of jobs, keyed on job_id
69
- self.resources = {}
70
-
71
- self.least_loaded = self._least_loaded()
72
- logger.debug("AdHoc provider initialized")
73
-
74
- def _write_submit_script(self, script_string, script_filename):
75
- '''
76
- Load the template string with config values and write the generated submit script to
77
- a submit script file.
78
-
79
- Parameters
80
- ----------
81
- script_string: (string)
82
- The template string to be used for the writing submit script
83
-
84
- script_filename: (string)
85
- Name of the submit script
86
-
87
- Returns
88
- -------
89
- None: on success
90
-
91
- Raises
92
- ------
93
- ScriptPathError
94
- Unable to write submit script out
95
- '''
96
-
97
- try:
98
- with open(script_filename, 'w') as f:
99
- f.write(script_string)
100
-
101
- except IOError as e:
102
- logger.error("Failed writing to submit script: %s", script_filename)
103
- raise ScriptPathError(script_filename, e)
104
-
105
- return None
106
-
107
- def _least_loaded(self):
108
- """ Find channels that are not in use
109
-
110
- Returns
111
- -------
112
- channel : Channel object
113
- None : When there are no more available channels
114
- """
115
- while True:
116
- channel_counts = {channel: 0 for channel in self.channels}
117
- for job_id in self.resources:
118
- channel = self.resources[job_id]['channel']
119
- if self.resources[job_id]['status'].state == JobState.RUNNING:
120
- channel_counts[channel] = channel_counts.get(channel, 0) + 1
121
- else:
122
- channel_counts[channel] = channel_counts.get(channel, 0)
123
-
124
- logger.debug("Channel_counts : {}".format(channel_counts))
125
- if 0 not in channel_counts.values():
126
- yield None
127
-
128
- for channel in channel_counts:
129
- if channel_counts[channel] == 0:
130
- yield channel
131
-
132
- def submit(self, command, tasks_per_node, job_name="parsl.adhoc"):
133
- ''' Submits the command onto a channel from the list of channels
134
-
135
- Submit returns an ID that corresponds to the task that was just submitted.
136
-
137
- Parameters
138
- ----------
139
- command: (String)
140
- Commandline invocation to be made on the remote side.
141
-
142
- tasks_per_node: (int)
143
- command invocations to be launched per node
144
-
145
- job_name: (String)
146
- Name of the job. Default : parsl.adhoc
147
-
148
-
149
- Returns
150
- -------
151
- None
152
- At capacity, cannot provision more
153
-
154
- job_id: (string)
155
- Identifier for the job
156
-
157
- '''
158
- channel = next(self.least_loaded)
159
- if channel is None:
160
- logger.warning("All Channels in Ad-Hoc provider are in use")
161
- return None
162
-
163
- job_name = "{0}.{1}".format(job_name, time.time())
164
-
165
- # Set script path
166
- script_path = "{0}/{1}.sh".format(self.script_dir, job_name)
167
- script_path = os.path.abspath(script_path)
168
-
169
- wrap_command = self.worker_init + '\n' + self.launcher(command, tasks_per_node, self.nodes_per_block)
170
-
171
- self._write_submit_script(wrap_command, script_path)
172
-
173
- job_id = None
174
- remote_pid = None
175
- final_cmd = None
176
-
177
- if (self.move_files is None and not isinstance(channel, LocalChannel)) or (self.move_files):
178
- logger.debug("Pushing start script")
179
- script_path = channel.push_file(script_path, channel.script_dir)
180
-
181
- # Bash would return until the streams are closed. So we redirect to a outs file
182
- final_cmd = 'bash {0} > {0}.out 2>&1 & \n echo "PID:$!" '.format(script_path)
183
- retcode, stdout, stderr = channel.execute_wait(final_cmd, self.cmd_timeout)
184
- for line in stdout.split('\n'):
185
- if line.startswith("PID:"):
186
- remote_pid = line.split("PID:")[1].strip()
187
- job_id = remote_pid
188
- if job_id is None:
189
- logger.warning("Channel failed to start remote command/retrieve PID")
190
-
191
- self.resources[job_id] = {'job_id': job_id,
192
- 'status': JobStatus(JobState.RUNNING),
193
- 'cmd': final_cmd,
194
- 'channel': channel,
195
- 'remote_pid': remote_pid}
196
-
197
- return job_id
198
-
199
- def status(self, job_ids):
200
- """ Get status of the list of jobs with job_ids
201
-
202
- Parameters
203
- ----------
204
- job_ids : list of strings
205
- List of job id strings
206
-
207
- Returns
208
- -------
209
- list of JobStatus objects
210
- """
211
- for job_id in job_ids:
212
- channel = self.resources[job_id]['channel']
213
- status_command = "ps --pid {} | grep {}".format(self.resources[job_id]['job_id'],
214
- self.resources[job_id]['cmd'].split()[0])
215
- retcode, stdout, stderr = channel.execute_wait(status_command)
216
- if retcode != 0 and self.resources[job_id]['status'].state == JobState.RUNNING:
217
- self.resources[job_id]['status'] = JobStatus(JobState.FAILED)
218
-
219
- return [self.resources[job_id]['status'] for job_id in job_ids]
220
-
221
- def cancel(self, job_ids):
222
- """ Cancel a list of jobs with job_ids
223
-
224
- Parameters
225
- ----------
226
- job_ids : list of strings
227
- List of job id strings
228
-
229
- Returns
230
- -------
231
- list of confirmation bools: [True, False...]
232
- """
233
- logger.debug("Cancelling jobs: {}".format(job_ids))
234
- rets = []
235
- for job_id in job_ids:
236
- channel = self.resources[job_id]['channel']
237
- cmd = "kill -TERM -$(ps -o pgid= {} | grep -o '[0-9]*')".format(self.resources[job_id]['job_id'])
238
- retcode, stdout, stderr = channel.execute_wait(cmd)
239
- if retcode == 0:
240
- rets.append(True)
241
- else:
242
- rets.append(False)
243
- self.resources[job_id]['status'] = JobStatus(JobState.COMPLETED)
244
- return rets
245
-
246
- @property
247
- def label(self):
248
- return self._label
249
-
250
- @property
251
- def status_polling_interval(self):
252
- return 10
@@ -1,18 +0,0 @@
1
- from parsl.channels import LocalChannel
2
- from parsl.config import Config
3
- from parsl.executors import HighThroughputExecutor
4
- from parsl.providers.ad_hoc.ad_hoc import DeprecatedAdHocProvider
5
-
6
-
7
- def fresh_config():
8
- return Config(
9
- executors=[
10
- HighThroughputExecutor(
11
- label='AdHoc',
12
- encrypted=True,
13
- provider=DeprecatedAdHocProvider(
14
- channels=[LocalChannel(), LocalChannel()]
15
- )
16
- )
17
- ]
18
- )
@@ -1,62 +0,0 @@
1
- import logging
2
-
3
- import pytest
4
-
5
- from parsl import python_app
6
- from parsl.tests.configs.local_adhoc import fresh_config as local_config
7
-
8
- logger = logging.getLogger(__name__)
9
-
10
-
11
- @python_app
12
- def python_app_2():
13
- import os
14
- import threading
15
- import time
16
- time.sleep(1)
17
- return "Hello from PID[{}] TID[{}]".format(os.getpid(), threading.current_thread())
18
-
19
-
20
- @python_app
21
- def python_app_1():
22
- import os
23
- import threading
24
- import time
25
- time.sleep(1)
26
- return "Hello from PID[{}] TID[{}]".format(os.getpid(), threading.current_thread())
27
-
28
-
29
- @python_app
30
- def bash_app(stdout=None, stderr=None):
31
- return 'echo "Hello from $(uname -a)" ; sleep 2'
32
-
33
-
34
- @pytest.mark.local
35
- def test_python(N=2):
36
- """Testing basic python functionality."""
37
-
38
- r1 = {}
39
- r2 = {}
40
- for i in range(0, N):
41
- r1[i] = python_app_1()
42
- r2[i] = python_app_2()
43
- print("Waiting ....")
44
-
45
- for x in r1:
46
- print("python_app_1 : ", r1[x].result())
47
- for x in r2:
48
- print("python_app_2 : ", r2[x].result())
49
-
50
- return
51
-
52
-
53
- @pytest.mark.local
54
- def test_bash():
55
- """Testing basic bash functionality."""
56
-
57
- import os
58
- fname = os.path.basename(__file__)
59
-
60
- x = bash_app(stdout="{0}.out".format(fname))
61
- print("Waiting ....")
62
- print(x.result())
@@ -1,26 +0,0 @@
1
- from unittest.mock import Mock
2
-
3
- import pytest
4
-
5
- import parsl
6
- from parsl.channels.base import Channel
7
- from parsl.executors import HighThroughputExecutor
8
- from parsl.providers import LocalProvider
9
-
10
-
11
- @pytest.mark.local
12
- def test_dfk_close():
13
-
14
- mock_channel = Mock(spec=Channel)
15
-
16
- # block settings all 0 because the mock channel won't be able to
17
- # do anything to make a block exist
18
- p = LocalProvider(channel=mock_channel, init_blocks=0, min_blocks=0, max_blocks=0)
19
-
20
- e = HighThroughputExecutor(provider=p)
21
-
22
- c = parsl.Config(executors=[e])
23
- with parsl.load(c):
24
- pass
25
-
26
- assert mock_channel.close.called