parsl 2024.8.12__py3-none-any.whl → 2024.8.26__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- parsl/channels/oauth_ssh/oauth_ssh.py +10 -2
- parsl/channels/ssh/ssh.py +16 -6
- parsl/channels/ssh_il/ssh_il.py +12 -2
- parsl/executors/high_throughput/executor.py +18 -27
- parsl/executors/high_throughput/interchange.py +31 -29
- parsl/executors/high_throughput/mpi_executor.py +23 -2
- parsl/executors/high_throughput/mpi_prefix_composer.py +5 -4
- parsl/executors/status_handling.py +5 -2
- parsl/jobs/states.py +6 -1
- parsl/monitoring/db_manager.py +21 -65
- parsl/monitoring/monitoring.py +10 -23
- parsl/monitoring/router.py +12 -39
- parsl/providers/slurm/slurm.py +40 -10
- parsl/tests/test_htex/test_multiple_disconnected_blocks.py +3 -5
- parsl/tests/test_htex/test_resource_spec_validation.py +40 -0
- parsl/tests/test_monitoring/test_htex_init_blocks_vs_monitoring.py +1 -1
- parsl/tests/test_mpi_apps/test_bad_mpi_config.py +29 -14
- parsl/tests/test_mpi_apps/test_mpi_mode_enabled.py +16 -8
- parsl/tests/test_mpi_apps/test_mpiex.py +2 -3
- parsl/tests/test_mpi_apps/test_resource_spec.py +39 -41
- parsl/tests/test_scaling/test_regression_3568_scaledown_vs_MISSING.py +85 -0
- parsl/version.py +1 -1
- {parsl-2024.8.12.data → parsl-2024.8.26.data}/scripts/interchange.py +31 -29
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/METADATA +5 -3
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/RECORD +32 -31
- parsl/tests/test_mpi_apps/test_mpi_mode_disabled.py +0 -47
- {parsl-2024.8.12.data → parsl-2024.8.26.data}/scripts/exec_parsl_function.py +0 -0
- {parsl-2024.8.12.data → parsl-2024.8.26.data}/scripts/parsl_coprocess.py +0 -0
- {parsl-2024.8.12.data → parsl-2024.8.26.data}/scripts/process_worker_pool.py +0 -0
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/LICENSE +0 -0
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/WHEEL +0 -0
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/entry_points.txt +0 -0
- {parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/top_level.txt +0 -0
parsl/tests/test_mpi_apps/test_resource_spec.py
CHANGED

```diff
@@ -1,18 +1,20 @@
 import contextlib
 import logging
 import os
+import queue
 import typing
 import unittest
 from typing import Dict
+from unittest import mock

 import pytest

-import parsl
 from parsl.app.app import python_app
+from parsl.executors.high_throughput.executor import HighThroughputExecutor
+from parsl.executors.high_throughput.mpi_executor import MPIExecutor
 from parsl.executors.high_throughput.mpi_prefix_composer import (
     InvalidResourceSpecification,
     MissingResourceSpecification,
-    validate_resource_spec,
 )
 from parsl.executors.high_throughput.mpi_resource_management import (
     get_nodes_in_batchjob,
@@ -20,6 +22,8 @@ from parsl.executors.high_throughput.mpi_resource_management import (
     get_slurm_hosts_list,
     identify_scheduler,
 )
+from parsl.launchers import SimpleLauncher
+from parsl.providers import LocalProvider
 from parsl.tests.configs.htex_local import fresh_config

 EXECUTOR_LABEL = "MPI_TEST"
@@ -48,23 +52,6 @@ def get_env_vars(parsl_resource_specification: Dict = {}) -> Dict:
     return parsl_vars


-@pytest.mark.local
-def test_resource_spec_env_vars():
-    resource_spec = {
-        "num_nodes": 4,
-        "ranks_per_node": 2,
-    }
-
-    assert double(5).result() == 10
-
-    future = get_env_vars(parsl_resource_specification=resource_spec)
-
-    result = future.result()
-    assert isinstance(result, Dict)
-    assert result["PARSL_NUM_NODES"] == str(resource_spec["num_nodes"])
-    assert result["PARSL_RANKS_PER_NODE"] == str(resource_spec["ranks_per_node"])
-
-
 @pytest.mark.local
 @unittest.mock.patch("subprocess.check_output", return_value=b"c203-031\nc203-032\n")
 def test_slurm_mocked_mpi_fetch(subprocess_check):
@@ -83,16 +70,6 @@ def add_to_path(path: os.PathLike) -> typing.Generator[None, None, None]:
         os.environ["PATH"] = old_path


-@pytest.mark.local
-@pytest.mark.skip
-def test_slurm_mpi_fetch():
-    logging.warning(f"Current pwd : {os.path.dirname(__file__)}")
-    with add_to_path(os.path.dirname(__file__)):
-        logging.warning(f"PATH: {os.environ['PATH']}")
-        nodeinfo = get_slurm_hosts_list()
-    logging.warning(f"Got : {nodeinfo}")
-
-
 @contextlib.contextmanager
 def mock_pbs_nodefile(nodefile: str = "pbs_nodefile") -> typing.Generator[None, None, None]:
     cwd = os.path.abspath(os.path.dirname(__file__))
@@ -122,22 +99,43 @@ def test_top_level():

 @pytest.mark.local
 @pytest.mark.parametrize(
-    "resource_spec,
+    "resource_spec, exception",
     (
-
-        ({"
-        ({"
-        ({
-        ({
-        ({"launcher_options": "--debug_foo"}, True, None),
-        ({"num_nodes": 2, "BAD_OPT": 1}, True, InvalidResourceSpecification),
-        ({}, True, MissingResourceSpecification),
+
+        ({"num_nodes": 2, "ranks_per_node": 1}, None),
+        ({"launcher_options": "--debug_foo"}, None),
+        ({"num_nodes": 2, "BAD_OPT": 1}, InvalidResourceSpecification),
+        ({}, MissingResourceSpecification),
     )
 )
-def
+def test_mpi_resource_spec(resource_spec: Dict, exception):
+    """Test validation of resource_specification in MPIExecutor"""
+
+    mpi_ex = MPIExecutor(provider=LocalProvider(launcher=SimpleLauncher()))
+    mpi_ex.outgoing_q = mock.Mock(spec=queue.Queue)
+
     if exception:
         with pytest.raises(exception):
-            validate_resource_spec(resource_spec
+            mpi_ex.validate_resource_spec(resource_spec)
     else:
-        result = validate_resource_spec(resource_spec
+        result = mpi_ex.validate_resource_spec(resource_spec)
         assert result is None
+
+
+@pytest.mark.local
+@pytest.mark.parametrize(
+    "resource_spec",
+    (
+        {"num_nodes": 2, "ranks_per_node": 1},
+        {"launcher_options": "--debug_foo"},
+        {"BAD_OPT": 1},
+    )
+)
+def test_mpi_resource_spec_passed_to_htex(resource_spec: dict):
+    """HTEX should reject every resource_spec"""
+
+    htex = HighThroughputExecutor()
+    htex.outgoing_q = mock.Mock(spec=queue.Queue)
+
+    with pytest.raises(InvalidResourceSpecification):
+        htex.validate_resource_spec(resource_spec)
```
parsl/tests/test_scaling/test_regression_3568_scaledown_vs_MISSING.py
ADDED

```diff
@@ -0,0 +1,85 @@
+import time
+
+import pytest
+
+import parsl
+from parsl.channels import LocalChannel
+from parsl.config import Config
+from parsl.executors import HighThroughputExecutor
+from parsl.launchers import WrappedLauncher
+from parsl.providers import LocalProvider
+
+
+def local_config():
+    # see the comments inside test_regression for reasoning about why each
+    # of these parameters is set why it is.
+    return Config(
+        max_idletime=1,
+
+        strategy='htex_auto_scale',
+        strategy_period=1,
+
+        executors=[
+            HighThroughputExecutor(
+                label="htex_local",
+                encrypted=True,
+                provider=LocalProvider(
+                    init_blocks=1,
+                    min_blocks=0,
+                    max_blocks=1,
+                    launcher=WrappedLauncher(prepend="sleep inf ; "),
+                ),
+            )
+        ],
+    )
+
+
+@parsl.python_app
+def task():
+    return 7
+
+
+@pytest.mark.local
+def test_regression(try_assert):
+    # The above config means that we should start scaling out one initial
+    # block, but then scale it back in after a second or so if the executor
+    # is kept idle (which this test does using try_assert).
+
+    # Because of 'sleep inf' in the WrappedLaucher, the block will not ever
+    # register.
+
+    # The bug being tested is about mistreatment of blocks which are scaled in
+    # before they have a chance to register, and the above forces that to
+    # happen.
+
+    # After that scaling in has happened, we should see that we have one block
+    # and it should be in a terminal state. The below try_assert waits for
+    # that to become true.
+
+    # At that time, we should also see htex reporting no blocks registered - as
+    # mentioned above, that is a necessary part of the bug being tested here.
+
+    # Give 10 strategy periods for the above to happen: each step of scale up,
+    # and scale down due to idleness isn't guaranteed to happen in exactly one
+    # scaling step.
+
+    htex = parsl.dfk().executors['htex_local']
+
+    try_assert(lambda: len(htex.status_facade) == 1 and htex.status_facade['0'].terminal,
+               timeout_ms=10000)
+
+    assert htex.connected_blocks() == [], "No block should have connected to interchange"
+
+    # Now we can reconfigure the launcher to let subsequent blocks launch ok,
+    # and run a trivial task. That trivial task will scale up a new block and
+    # run the task successfully.
+
+    # Prior to issue #3568, the bug was that the scale in of the first
+    # block earlier in the test case would have incorrectly been treated as a
+    # failure, and then the block error handler would have treated that failure
+    # as a permanent htex failure, and so the task execution below would raise
+    # a BadStateException rather than attempt to run the task.
+
+    assert htex.provider.launcher.prepend != "", "Pre-req: prepend attribute should exist and be non-empty"
+    htex.provider.launcher.prepend = ""
+    assert task().result() == 7
```
parsl/executors/high_throughput/interchange.py
CHANGED (per the file list above, the same +31/-31 change also appears in {parsl-2024.8.12.data → parsl-2024.8.26.data}/scripts/interchange.py)
```diff
@@ -375,7 +375,7 @@ class Interchange:

         self.zmq_context.destroy()
         delta = time.time() - start
-        logger.info("Processed {} tasks in {} seconds"
+        logger.info(f"Processed {self.count} tasks in {delta} seconds")
         logger.warning("Exiting")

     def process_task_outgoing_incoming(
@@ -396,9 +396,8 @@
         try:
             msg = json.loads(message[1].decode('utf-8'))
         except Exception:
-            logger.warning("Got Exception reading message from manager: {!r}"
-
-            logger.debug("Message: \n{!r}\n".format(message[1]))
+            logger.warning(f"Got Exception reading message from manager: {manager_id!r}", exc_info=True)
+            logger.debug("Message:\n %r\n", message[1])
             return

         # perform a bit of validation on the structure of the deserialized
@@ -406,7 +405,7 @@
         # in obviously malformed cases
         if not isinstance(msg, dict) or 'type' not in msg:
             logger.error(f"JSON message was not correctly formatted from manager: {manager_id!r}")
-            logger.debug("Message
+            logger.debug("Message:\n %r\n", message[1])
             return

         if msg['type'] == 'registration':
@@ -425,7 +424,7 @@
             self.connected_block_history.append(msg['block_id'])

             interesting_managers.add(manager_id)
-            logger.info("Adding manager: {!r} to ready queue"
+            logger.info(f"Adding manager: {manager_id!r} to ready queue")
             m = self._ready_managers[manager_id]

             # m is a ManagerRecord, but msg is a dict[Any,Any] and so can
@@ -434,12 +433,12 @@
             # later.
             m.update(msg)  # type: ignore[typeddict-item]

-            logger.info("Registration info for manager {!r}: {}"
+            logger.info(f"Registration info for manager {manager_id!r}: {msg}")
             self._send_monitoring_info(monitoring_radio, m)

             if (msg['python_v'].rsplit(".", 1)[0] != self.current_platform['python_v'].rsplit(".", 1)[0] or
                 msg['parsl_v'] != self.current_platform['parsl_v']):
-                logger.error("Manager {!r} has incompatible version info with the interchange"
+                logger.error(f"Manager {manager_id!r} has incompatible version info with the interchange")
                 logger.debug("Setting kill event")
                 kill_event.set()
                 e = VersionMismatch("py.v={} parsl.v={}".format(self.current_platform['python_v'].rsplit(".", 1)[0],
@@ -452,16 +451,15 @@
                 self.results_outgoing.send(pkl_package)
                 logger.error("Sent failure reports, shutting down interchange")
             else:
-                logger.info("Manager {!r} has compatible Parsl version {
-                logger.info("Manager {!r} has compatible Python version {
-                msg['python_v'].rsplit(".", 1)[0]))
+                logger.info(f"Manager {manager_id!r} has compatible Parsl version {msg['parsl_v']}")
+                logger.info(f"Manager {manager_id!r} has compatible Python version {msg['python_v'].rsplit('.', 1)[0]}")
         elif msg['type'] == 'heartbeat':
             self._ready_managers[manager_id]['last_heartbeat'] = time.time()
-            logger.debug("Manager
+            logger.debug("Manager %r sent heartbeat via tasks connection", manager_id)
             self.task_outgoing.send_multipart([manager_id, b'', PKL_HEARTBEAT_CODE])
         elif msg['type'] == 'drain':
             self._ready_managers[manager_id]['draining'] = True
-            logger.debug(
+            logger.debug("Manager %r requested drain", manager_id)
         else:
             logger.error(f"Unexpected message type received from manager: {msg['type']}")
         logger.debug("leaving task_outgoing section")
@@ -484,9 +482,11 @@
     def process_tasks_to_send(self, interesting_managers: Set[bytes]) -> None:
         # Check if there are tasks that could be sent to managers

-        logger.debug(
-        total
-
+        logger.debug(
+            "Managers count (interesting/total): %d/%d",
+            len(interesting_managers),
+            len(self._ready_managers)
+        )

         if interesting_managers and not self.pending_task_queue.empty():
             shuffled_managers = self.manager_selector.sort_managers(self._ready_managers, interesting_managers)
@@ -497,7 +497,7 @@
                 tasks_inflight = len(m['tasks'])
                 real_capacity = m['max_capacity'] - tasks_inflight

-                if
+                if real_capacity and m["active"] and not m["draining"]:
                     tasks = self.get_tasks(real_capacity)
                     if tasks:
                         self.task_outgoing.send_multipart([manager_id, b'', pickle.dumps(tasks)])
@@ -506,19 +506,19 @@
                         tids = [t['task_id'] for t in tasks]
                         m['tasks'].extend(tids)
                         m['idle_since'] = None
-                        logger.debug("Sent tasks:
+                        logger.debug("Sent tasks: %s to manager %r", tids, manager_id)
                         # recompute real_capacity after sending tasks
                         real_capacity = m['max_capacity'] - tasks_inflight
                         if real_capacity > 0:
-                            logger.debug("Manager
+                            logger.debug("Manager %r has free capacity %s", manager_id, real_capacity)
                             # ... so keep it in the interesting_managers list
                         else:
-                            logger.debug("Manager
+                            logger.debug("Manager %r is now saturated", manager_id)
                             interesting_managers.remove(manager_id)
                     else:
                         interesting_managers.remove(manager_id)
                         # logger.debug("Nothing to send to manager {}".format(manager_id))
-            logger.debug("leaving _ready_managers section, with
+            logger.debug("leaving _ready_managers section, with %s managers still interesting", len(interesting_managers))
         else:
             logger.debug("either no interesting managers or no tasks, so skipping manager pass")

@@ -528,9 +528,9 @@
         logger.debug("entering results_incoming section")
         manager_id, *all_messages = self.results_incoming.recv_multipart()
         if manager_id not in self._ready_managers:
-            logger.warning("Received a result from a un-registered manager: {!r}"
+            logger.warning(f"Received a result from a un-registered manager: {manager_id!r}")
         else:
-            logger.debug(
+            logger.debug("Got %s result items in batch from manager %r", len(all_messages), manager_id)

             b_messages = []

@@ -548,10 +548,10 @@

                     monitoring_radio.send(r['payload'])
                 elif r['type'] == 'heartbeat':
-                    logger.debug(
+                    logger.debug("Manager %r sent heartbeat via results connection", manager_id)
                     b_messages.append((p_message, r))
                 else:
-                    logger.error("Interchange discarding result_queue message of unknown type:
+                    logger.error("Interchange discarding result_queue message of unknown type: %s", r["type"])

             got_result = False
             m = self._ready_managers[manager_id]
@@ -560,14 +560,16 @@
                 if r['type'] == 'result':
                     got_result = True
                     try:
-                        logger.debug(
+                        logger.debug("Removing task %s from manager record %r", r["task_id"], manager_id)
                         m['tasks'].remove(r['task_id'])
                     except Exception:
                         # If we reach here, there's something very wrong.
-                        logger.exception(
+                        logger.exception(
+                            "Ignoring exception removing task_id %s for manager %r with task list %s",
                             r['task_id'],
                             manager_id,
-                            m[
+                            m["tasks"]
+                        )

             b_messages_to_send = []
             for (b_message, _) in b_messages:
@@ -578,7 +580,7 @@
                 self.results_outgoing.send_multipart(b_messages_to_send)
                 logger.debug("Sent messages on results_outgoing")

-            logger.debug(
+            logger.debug("Current tasks on manager %r: %s", manager_id, m["tasks"])
             if len(m['tasks']) == 0 and m['idle_since'] is None:
                 m['idle_since'] = time.time()

```
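
Most of the interchange changes above replace truncated `.format()`-style log calls with either f-strings or lazy %-style arguments. A small illustrative sketch (not parsl code) of the two styles used in the new lines:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("interchange-style-demo")

manager_id = b"\x00\x01"   # stand-in for a ZMQ manager identity
tids = [1, 2, 3]

# %-style arguments are only interpolated if the record is actually emitted,
# which is why the DEBUG-level messages above pass their arguments separately.
logger.debug("Sent tasks: %s to manager %r", tids, manager_id)

# f-strings are evaluated eagerly; the diff uses them for INFO/ERROR messages
# that are expected to be emitted anyway.
logger.info(f"Adding manager: {manager_id!r} to ready queue")
```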
{parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/METADATA
CHANGED

```diff
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: parsl
-Version: 2024.8.
+Version: 2024.8.26
 Summary: Simple data dependent workflows in Python
 Home-page: https://github.com/Parsl/parsl
-Download-URL: https://github.com/Parsl/parsl/archive/2024.08.
+Download-URL: https://github.com/Parsl/parsl/archive/2024.08.26.tar.gz
 Author: The Parsl Team
 Author-email: parsl@googlegroups.com
 License: Apache 2.0
@@ -25,7 +25,6 @@ Requires-Dist: globus-sdk
 Requires-Dist: dill
 Requires-Dist: tblib
 Requires-Dist: requests
-Requires-Dist: paramiko
 Requires-Dist: psutil>=5.5.1
 Requires-Dist: setproctitle
 Requires-Dist: filelock<4,>=3.13
@@ -57,6 +56,7 @@ Requires-Dist: jsonschema; extra == "all"
 Requires-Dist: proxystore; extra == "all"
 Requires-Dist: radical.pilot==1.60; extra == "all"
 Requires-Dist: radical.utils==1.60; extra == "all"
+Requires-Dist: paramiko; extra == "all"
 Provides-Extra: aws
 Requires-Dist: boto3; extra == "aws"
 Provides-Extra: azure
@@ -87,6 +87,8 @@ Requires-Dist: proxystore; extra == "proxystore"
 Provides-Extra: radical-pilot
 Requires-Dist: radical.pilot==1.60; extra == "radical-pilot"
 Requires-Dist: radical.utils==1.60; extra == "radical-pilot"
+Provides-Extra: ssh
+Requires-Dist: paramiko; extra == "ssh"
 Provides-Extra: visualization
 Requires-Dist: pydot; extra == "visualization"
 Requires-Dist: networkx<2.6,>=2.5; extra == "visualization"
```
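
One practical consequence of the metadata change above: `paramiko` is no longer a core dependency of the 2024.8.26 wheel; it is only pulled in through the new `ssh` extra (or `all`). A hedged runtime check for code that relies on the paramiko-backed SSH channels:

```python
import importlib.util

# paramiko moved out of parsl's core requirements in this release; the new
# "ssh" extra (or "all") now provides it, e.g. `pip install 'parsl[ssh]'`.
if importlib.util.find_spec("paramiko") is None:
    raise SystemExit(
        "paramiko is not installed; install parsl's 'ssh' extra before using "
        "the SSH-based channels"
    )
```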
{parsl-2024.8.12.dist-info → parsl-2024.8.26.dist-info}/RECORD
CHANGED

```diff
@@ -8,7 +8,7 @@ parsl/multiprocessing.py,sha256=MyaEcEq-Qf860u7V98u-PZrPNdtzOZL_NW6EhIJnmfQ,1937
 parsl/process_loggers.py,sha256=uQ7Gd0W72Jz7rrcYlOMfLsAEhkRltxXJL2MgdduJjEw,1136
 parsl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/utils.py,sha256=91FjQiTUY383ueAjkBAgE21My9nba6SP2a2SrbB1r1Q,11250
-parsl/version.py,sha256=
+parsl/version.py,sha256=A5qAfJZdMOvpSxHivCOIgDMa32MpnZXa_O6ZHtOGoGs,131
 parsl/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/app/app.py,sha256=0gbM4AH2OtFOLsv07I5nglpElcwMSOi-FzdZZfrk7So,8532
 parsl/app/bash.py,sha256=jm2AvePlCT9DZR7H_4ANDWxatp5dN_22FUlT_gWhZ-g,5528
@@ -23,11 +23,11 @@ parsl/channels/errors.py,sha256=Dp0FhtHpygn0IjX8nGurx-WrTJm9aw-Jjz3SSUT-jCc,3283
 parsl/channels/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/channels/local/local.py,sha256=xqH4HnipUN95NgvyB1r33SiqgQKkARgRKmg0_HnumUk,5311
 parsl/channels/oauth_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/channels/oauth_ssh/oauth_ssh.py,sha256=
+parsl/channels/oauth_ssh/oauth_ssh.py,sha256=6pj3LQAX89p5Lc8NL1Llq2_noi8GS8BItCuRtDp-iCA,3823
 parsl/channels/ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/channels/ssh/ssh.py,sha256=
+parsl/channels/ssh/ssh.py,sha256=3PfE3qYQOCr-BZrCseGiMKYFUILFPmW_CgvV63CWI4M,10494
 parsl/channels/ssh_il/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/channels/ssh_il/ssh_il.py,sha256=
+parsl/channels/ssh_il/ssh_il.py,sha256=acOXJyqCmgC2nl7zrO_uEu3GpJZMN2l-Af5XfmNMLRs,2783
 parsl/concurrent/__init__.py,sha256=TvIVceJYaJAsxedNBF3Vdo9lEQNHH_j3uxJv0zUjP7w,3288
 parsl/configs/ASPIRE1.py,sha256=eKnmz0QD3V522emtXMjS6Ppeooe5lzcBgCE6cxunbYY,1718
 parsl/configs/Azure.py,sha256=CJms3xWmdb-S3CksbHrPF2TfMxJC5I0faqUKCOzVg0k,1268
@@ -71,7 +71,7 @@ parsl/dataflow/taskrecord.py,sha256=-FuujdZQ1y5GSc-PJ91QKGT-Kp0lrg70MFDoxpbWI1Q,
 parsl/executors/__init__.py,sha256=Cg8e-F2NUaBD8A9crDAXKCSdoBEwQVIdgm4FlXd-wvk,476
 parsl/executors/base.py,sha256=5A59mCXPjYNCep9JgfvIjBdZvGV-1mNVHklr-ZIEojg,5200
 parsl/executors/errors.py,sha256=xVswxgi7vmJcUMCeYDAPK8sQT2kHFFROVoOr0dnmcWE,2098
-parsl/executors/status_handling.py,sha256=
+parsl/executors/status_handling.py,sha256=nxbkiGr6f3xDc0nsUeSrMMxlj7UD32K7nOLCLzfthDs,15416
 parsl/executors/threads.py,sha256=hJt1LzxphqX4fe_9R9Cf1MU0lepWTU_eJe8O665B0Xo,3352
 parsl/executors/flux/__init__.py,sha256=P9grTTeRPXfqXurFhlSS7XhmE6tTbnCnyQ1f9b-oYHE,136
 parsl/executors/flux/execute_parsl_task.py,sha256=gRN7F4HhdrKQ-bvn4wXrquBzFOp_9WF88hMIeUaRg5I,1553
@@ -79,13 +79,13 @@ parsl/executors/flux/executor.py,sha256=8_xakLUu5zNJAHL0LbeTCFEWqWzRK1eE-3ep4GII
 parsl/executors/flux/flux_instance_manager.py,sha256=5T3Rp7ZM-mlT0Pf0Gxgs5_YmnaPrSF9ec7zvRfLfYJw,2129
 parsl/executors/high_throughput/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/executors/high_throughput/errors.py,sha256=Sak8e8UpiEcXefUjMHbhyXc4Rn7kJtOoh7L8wreBQdk,1638
-parsl/executors/high_throughput/executor.py,sha256=
-parsl/executors/high_throughput/interchange.py,sha256=
+parsl/executors/high_throughput/executor.py,sha256=x6DGXdriDIPpfDK6yms7XTUrkwxNCHNfz6X9kJRvt2w,37904
+parsl/executors/high_throughput/interchange.py,sha256=WP9zseYYb0B8522j8wt3yhO12bzmFIxdCIepEU-4oWA,30877
 parsl/executors/high_throughput/manager_record.py,sha256=yn3L8TUJFkgm2lX1x0SeS9mkvJowC0s2VIMCFiU7ThM,455
 parsl/executors/high_throughput/manager_selector.py,sha256=uRaEtcbDO2vXf8vjEcm7bfZVdeUlSPTRc3G4oFRO29M,820
 parsl/executors/high_throughput/monitoring_info.py,sha256=HC0drp6nlXQpAop5PTUKNjdXMgtZVvrBL0JzZJebPP4,298
-parsl/executors/high_throughput/mpi_executor.py,sha256=
-parsl/executors/high_throughput/mpi_prefix_composer.py,sha256=
+parsl/executors/high_throughput/mpi_executor.py,sha256=khvGz56A8zU8XAY-R4TtqqiJB8B10mkVTXD_9xtrXgo,4696
+parsl/executors/high_throughput/mpi_prefix_composer.py,sha256=XQAv9MH7pl5rCUOVw1x8qB64n8iT1-smiVLTBSB1Ro0,4878
 parsl/executors/high_throughput/mpi_resource_management.py,sha256=LFBbJ3BnzTcY_v-jNu30uoIB2Enk4cleN4ygY3dncjY,8194
 parsl/executors/high_throughput/probe.py,sha256=TNpGTXb4_DEeg_h-LHu4zEKi1-hffboxvKcZUl2OZGk,2751
 parsl/executors/high_throughput/process_worker_pool.py,sha256=3s-Ouo3ZEhod7hon8euyL37t1DbP5pSVjXyC23DSN_0,43075
@@ -113,20 +113,20 @@ parsl/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/jobs/error_handlers.py,sha256=BBXwUAMJpBm0HxV1P-I6jv7ZF9wcrhnCfzSTlsd2g4w,2319
 parsl/jobs/errors.py,sha256=cpSQXCrlKtuHsQf7usjF-lX8XsDkFnE5kWpmFjiN6OU,178
 parsl/jobs/job_status_poller.py,sha256=b37JOqDpSesqeSreEh1HzfVTFnD5Aoy6k8JDXkkPDmk,2192
-parsl/jobs/states.py,sha256=
+parsl/jobs/states.py,sha256=dUM8gC4YVpUjLMARJJ_tDERs6oHsoNheAtG6JWPIJt4,5058
 parsl/jobs/strategy.py,sha256=KYcIpjWVKLYbM0TXhue9Zp2a7I1zkbHx4raPFiDlewA,13799
 parsl/launchers/__init__.py,sha256=jJeDOWGKJjvpmWTLsj1zSqce_UAhWRc_IO-TzaOAlII,579
 parsl/launchers/base.py,sha256=CblcvPTJiu-MNLWaRtFe29SZQ0BpTOlaY8CGcHdlHIE,538
 parsl/launchers/errors.py,sha256=8YMV_CHpBNVa4eXkGE4x5DaFQlZkDCRCHmBktYcY6TA,467
 parsl/launchers/launchers.py,sha256=VB--fiVv_IQne3DydTMSdGUY0o0g69puAs-Hd3mJ2vo,15464
 parsl/monitoring/__init__.py,sha256=0ywNz6i0lM1xo_7_BIxhETDGeVd2C_0wwD7qgeaMR4c,83
-parsl/monitoring/db_manager.py,sha256=
+parsl/monitoring/db_manager.py,sha256=l7Qiub4JsR6QUzTYUAJ9sVytZOvba2QMBdFH3cGbNIo,33336
 parsl/monitoring/errors.py,sha256=D6jpYzEzp0d6FmVKGqhvjAxr4ztZfJX2s-aXemH9bBU,148
 parsl/monitoring/message_type.py,sha256=Khn88afNxcOIciKiCK4GLnn90I5BlRTiOL3zK-P07yQ,401
-parsl/monitoring/monitoring.py,sha256=
+parsl/monitoring/monitoring.py,sha256=q_U2zpcd_hy0cxdWNXF_qhNBe1SQDipStvD1LdcWhlo,13098
 parsl/monitoring/radios.py,sha256=cHdpBOW1ITYvFnOgYjziuZOauq8p7mlSBOvcbIP78mg,6437
 parsl/monitoring/remote.py,sha256=avIWMvejN0LeIXpt_RCXJxGLbsXhapUab2rS5Tmjca4,13739
-parsl/monitoring/router.py,sha256=
+parsl/monitoring/router.py,sha256=8zWTaYIXWsgpMranTTEPhTPqQSmT2ePK8JJmfW8K34s,9256
 parsl/monitoring/types.py,sha256=_WGizCTgQVOkJ2dvNfsvHpYBj21Ky3bJsmyIskIx10I,631
 parsl/monitoring/queries/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/monitoring/queries/pandas.py,sha256=0Z2r0rjTKCemf0eaDkF1irvVHn5g7KC5SYETvQPRxwU,2232
@@ -186,7 +186,7 @@ parsl/providers/pbspro/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
 parsl/providers/pbspro/pbspro.py,sha256=jh9rzSOKRf0LKtqHSaolqVQtRa1jyjcZLsjk8Wp-llg,8794
 parsl/providers/pbspro/template.py,sha256=y-Dher--t5Eury-c7cAuSZs9FEUXWiruFUI07v81558,315
 parsl/providers/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/providers/slurm/slurm.py,sha256=
+parsl/providers/slurm/slurm.py,sha256=qFG0MNSV6oG5mBYusS8y53DR13Nhq9DxQ6bGfncbJeQ,15719
 parsl/providers/slurm/template.py,sha256=KpgBEFMc1ps-38jdrk13xUGx9TCivu-iF90jgQDdiEQ,315
 parsl/providers/torque/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/providers/torque/template.py,sha256=4qfc2gmlEhRCAD7erFDOs4prJQ43I8s4E8DSUSVQx3A,358
@@ -340,7 +340,8 @@ parsl/tests/test_htex/test_htex.py,sha256=5ylQvWgmSLP3lOdoHxqK9wkvAgfgeJx6gihKPk
 parsl/tests/test_htex/test_manager_failure.py,sha256=N-obuSZ8f7XA_XcddoN2LWKSVtpKUZvTHb7BFelS3iQ,1143
 parsl/tests/test_htex/test_managers_command.py,sha256=Y-eUjtBzwW9erCYdph9bOesbkUvX8QUPqXt27DCgVS8,951
 parsl/tests/test_htex/test_missing_worker.py,sha256=gyp5i7_t-JHyJGtz_eXZKKBY5w8oqLOIxO6cJgGJMtQ,745
-parsl/tests/test_htex/test_multiple_disconnected_blocks.py,sha256=
+parsl/tests/test_htex/test_multiple_disconnected_blocks.py,sha256=2vXZoIx4NuAWYuiNoL5Gxr85w72qZ7Kdb3JGh0FufTg,1867
+parsl/tests/test_htex/test_resource_spec_validation.py,sha256=k1zQ--46bCyhOnt2UTaYnSh0I2UhwX747ISAfy8xPvk,952
 parsl/tests/test_htex/test_worker_failure.py,sha256=Uz-RHI-LK78FMjXUvrUFmo4iYfmpDVBUcBxxRb3UG9M,603
 parsl/tests/test_htex/test_zmq_binding.py,sha256=Bq1HHuMxBE_AcaP1VZ-RqE4euCHO__Du05b2UZ5H1RA,3950
 parsl/tests/test_monitoring/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -348,19 +349,18 @@ parsl/tests/test_monitoring/test_app_names.py,sha256=ayyxySGWpKSe9dDw2UeJo1dicxj
 parsl/tests/test_monitoring/test_basic.py,sha256=nQERwVH56CjrKc_YSsMxH5UziJDqN2357Vhyd0brbRU,4177
 parsl/tests/test_monitoring/test_db_locks.py,sha256=3s3c1xhKo230ZZIJ3f1Ca4U7LcEdXnanOGVXQyNlk2U,2895
 parsl/tests/test_monitoring/test_fuzz_zmq.py,sha256=--3-pQUvXXbkr8v_BEJoPvVvNly1oXvrD2nJh6yl_0M,3436
-parsl/tests/test_monitoring/test_htex_init_blocks_vs_monitoring.py,sha256=
+parsl/tests/test_monitoring/test_htex_init_blocks_vs_monitoring.py,sha256=BAnl80waEaE41pvtpYD-AbNgdfF7QBgVwcCN9IsFPTM,2746
 parsl/tests/test_monitoring/test_incomplete_futures.py,sha256=ZnO1sFSwlWUBHX64C_zwfTVRVC_UFNlU4h0POgx6NEo,2005
 parsl/tests/test_monitoring/test_memoization_representation.py,sha256=dknv2nO7pNZ1jGxWGsC_AW3rs90gjMIeC5d7pIJ75Xc,2645
 parsl/tests/test_monitoring/test_stdouterr.py,sha256=9FQSfiaMrOpoSwravZuEwmdgUgI7iG0TPRucsYC_NJA,4498
 parsl/tests/test_monitoring/test_viz_colouring.py,sha256=83Qdmn3gM0j7IL6kPDcuIsp_nl4zj-liPijyIN632SY,592
 parsl/tests/test_mpi_apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/tests/test_mpi_apps/test_bad_mpi_config.py,sha256=
-parsl/tests/test_mpi_apps/
-parsl/tests/test_mpi_apps/test_mpi_mode_enabled.py,sha256=pV-htWmPNyY7XKN4Qo-twLmH-qreCgFlYwokgZbTS_g,5304
+parsl/tests/test_mpi_apps/test_bad_mpi_config.py,sha256=QKvEUSrHIBrvqu2fRj1MAqxsYxDfcrdQ7dzWdOZejuU,1320
+parsl/tests/test_mpi_apps/test_mpi_mode_enabled.py,sha256=9RaRgfweywYvcrTvteJXJwt_RSiyWSjBgii5LCnisJg,5461
 parsl/tests/test_mpi_apps/test_mpi_prefix.py,sha256=yJslZvYK3JeL9UgxMwF9DDPR9QD4zJLGVjubD0F-utc,1950
 parsl/tests/test_mpi_apps/test_mpi_scheduler.py,sha256=YdV8A-m67DHk9wxgNpj69wwGEKrFGL20KAC1TzLke3c,6332
-parsl/tests/test_mpi_apps/test_mpiex.py,sha256=
-parsl/tests/test_mpi_apps/test_resource_spec.py,sha256=
+parsl/tests/test_mpi_apps/test_mpiex.py,sha256=N44sOaTOMchmZ3bI_w5h2mjOnS0sGFq8IqzIOpF0MMI,2036
+parsl/tests/test_mpi_apps/test_resource_spec.py,sha256=aJo_1Nr0t-5pzw_rpDWEVp41RcICWG9sAeFUFXXJoW8,3828
 parsl/tests/test_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/tests/test_providers/test_cobalt_deprecation_warning.py,sha256=UN2W6xJxuLx2euPqArORKFEU2VXez9_PYqq-0rZHanQ,391
 parsl/tests/test_providers/test_local_provider.py,sha256=R96E1eWgHVkvOQ1Au9wj-gfdWKAqGc-qlygFuxpGFQ8,7160
@@ -415,6 +415,7 @@ parsl/tests/test_regression/test_98.py,sha256=E7dituuonKN5uWocZkJYZlaE5x5rDM4MZl
 parsl/tests/test_scaling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/tests/test_scaling/test_block_error_handler.py,sha256=OS1IyiK8gjRFI1VzpmOvEnKsPev2vKmC6Z2Hp5LaHpA,6068
 parsl/tests/test_scaling/test_regression_1621.py,sha256=cAPjJ0p_VLZm9Z6EK7QuOgeO5KpcUXQ0ar698T6uMy4,1944
+parsl/tests/test_scaling/test_regression_3568_scaledown_vs_MISSING.py,sha256=uL4dmaxqix9K6P-5vDTFqPye1BIeyynJjiYZBx5XI3E,2982
 parsl/tests/test_scaling/test_scale_down.py,sha256=u8TbbVM2PXgy4Zg7bAkh0C-KQuF1kD_WEsO79R0Y-UE,2820
 parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py,sha256=A-aDudFnWvOr5Z4m3Z0WW7-MJZ6ZveEneogZEzSom1k,4596
 parsl/tests/test_scaling/test_scale_down_htex_unregistered.py,sha256=4DYZB9BMDzyC659bf-gmU3ltnRvCgXVrfnnehb7cL5c,2029
@@ -459,13 +460,13 @@ parsl/usage_tracking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 parsl/usage_tracking/api.py,sha256=iaCY58Dc5J4UM7_dJzEEs871P1p1HdxBMtNGyVdzc9g,1821
 parsl/usage_tracking/levels.py,sha256=xbfzYEsd55KiZJ-mzNgPebvOH4rRHum04hROzEf41tU,291
 parsl/usage_tracking/usage.py,sha256=qNEJ7nPimqd3Y7OWFLdYmNwJ6XDKlyfV_fTzasxsQw8,8690
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
-parsl-2024.8.
+parsl-2024.8.26.data/scripts/exec_parsl_function.py,sha256=RUkJ4JSJAjr7YyRZ58zhMdg8cR5dVV9odUl3AuzNf3k,7802
+parsl-2024.8.26.data/scripts/interchange.py,sha256=2tsbwd055SEnSpWLNNoqMW6o6ohRJFNSgvgN_umsqN8,30864
+parsl-2024.8.26.data/scripts/parsl_coprocess.py,sha256=zrVjEqQvFOHxsLufPi00xzMONagjVwLZbavPM7bbjK4,5722
+parsl-2024.8.26.data/scripts/process_worker_pool.py,sha256=78QKnV5KbY_vcteC6k60gpDE4wEk6hsciet_qzs9QoU,43061
+parsl-2024.8.26.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
+parsl-2024.8.26.dist-info/METADATA,sha256=biMwH3S2mcDlcW_m-xHagXcXYOsw3eE63SCgt2Xbcfw,4121
+parsl-2024.8.26.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+parsl-2024.8.26.dist-info/entry_points.txt,sha256=XqnsWDYoEcLbsMcpnYGKLEnSBmaIe1YoM5YsBdJG2tI,176
+parsl-2024.8.26.dist-info/top_level.txt,sha256=PIheYoUFQtF2icLsgOykgU-Cjuwr2Oi6On2jo5RYgRM,6
+parsl-2024.8.26.dist-info/RECORD,,
```
parsl/tests/test_mpi_apps/test_mpi_mode_disabled.py
DELETED

```diff
@@ -1,47 +0,0 @@
-from typing import Dict
-
-import pytest
-
-import parsl
-from parsl import python_app
-from parsl.tests.configs.htex_local import fresh_config
-
-EXECUTOR_LABEL = "MPI_TEST"
-
-
-def local_config():
-    config = fresh_config()
-    config.executors[0].label = EXECUTOR_LABEL
-    config.executors[0].max_workers_per_node = 1
-    config.executors[0].enable_mpi_mode = False
-    return config
-
-
-@python_app
-def get_env_vars(parsl_resource_specification: Dict = {}) -> Dict:
-    import os
-
-    parsl_vars = {}
-    for key in os.environ:
-        if key.startswith("PARSL_"):
-            parsl_vars[key] = os.environ[key]
-    return parsl_vars
-
-
-@pytest.mark.local
-def test_only_resource_specs_set():
-    """Confirm that resource_spec env vars are set while launch prefixes are not
-    when enable_mpi_mode = False"""
-    resource_spec = {
-        "num_nodes": 4,
-        "ranks_per_node": 2,
-    }
-
-    future = get_env_vars(parsl_resource_specification=resource_spec)
-
-    result = future.result()
-    assert isinstance(result, Dict)
-    assert "PARSL_DEFAULT_PREFIX" not in result
-    assert "PARSL_SRUN_PREFIX" not in result
-    assert result["PARSL_NUM_NODES"] == str(resource_spec["num_nodes"])
-    assert result["PARSL_RANKS_PER_NODE"] == str(resource_spec["ranks_per_node"])
```