parsl 2023.7.10__py3-none-any.whl → 2023.7.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -595,10 +595,9 @@ class HighThroughputExecutor(BlockProviderExecutor, RepresentationMixin):
         except TypeError:
             raise SerializationError(func.__name__)
 
-        msg = {"task_id": task_id,
-               "buffer": fn_buf}
+        msg = {"task_id": task_id, "buffer": fn_buf}
 
-        # Post task to the the outgoing queue
+        # Post task to the outgoing queue
         self.outgoing_q.put(msg)
 
         # Return the future
@@ -370,7 +370,7 @@ class Manager:
         logger.critical("Exiting")
 
     @wrap_with_logs
-    def worker_watchdog(self, kill_event):
+    def worker_watchdog(self, kill_event: threading.Event):
         """Keeps workers alive.
 
         Parameters:
@@ -381,7 +381,7 @@ class Manager:
 
         logger.debug("Starting worker watchdog")
 
-        while not kill_event.is_set():
+        while not kill_event.wait(self.heartbeat_period):
             for worker_id, p in self.procs.items():
                 if not p.is_alive():
                     logger.error("Worker {} has died".format(worker_id))
@@ -409,7 +409,6 @@ class Manager:
                                     name="HTEX-Worker-{}".format(worker_id))
                     self.procs[worker_id] = p
                     logger.info("Worker {} has been restarted".format(worker_id))
-            time.sleep(self.heartbeat_period)
 
         logger.critical("Exiting")
 
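The two hunks above replace an is_set() check plus an unconditional time.sleep(self.heartbeat_period) with a single kill_event.wait(self.heartbeat_period). A minimal sketch of that pattern, with illustrative names rather than parsl's:

import threading

def watchdog_loop(kill_event: threading.Event, period: float) -> None:
    # Event.wait(timeout) returns False when the timeout elapses and True once
    # the event is set, so the loop paces itself and still exits within one
    # period of a shutdown request instead of finishing a full sleep first.
    while not kill_event.wait(period):
        pass  # periodic liveness checks would go here

stop = threading.Event()
t = threading.Thread(target=watchdog_loop, args=(stop, 0.1), daemon=True)
t.start()
stop.set()  # the loop observes this within one period
t.join()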
@@ -28,25 +28,6 @@ class PickleSerializer(SerializerBase):
         return pickle.loads(body)
 
 
-class PickleCallableSerializer(SerializerBase):
-    """This serializer is a variant of the PickleSerializer that will
-    serialize and deserialize callables using an lru_cache, under the
-    assumption that callables are immutable and so can be cached.
-    """
-
-    _identifier = b'C1'
-    _for_code = True
-    _for_data = False
-
-    @functools.lru_cache
-    def serialize(self, data: Any) -> bytes:
-        return pickle.dumps(data)
-
-    @functools.lru_cache
-    def deserialize(self, body: bytes) -> Any:
-        return pickle.loads(body)
-
-
 class DillSerializer(SerializerBase):
     """ Dill serialization works on a superset of object including the ones covered by pickle.
     However for most cases pickle is faster. For most callable objects the additional overhead
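The removed PickleCallableSerializer decorated instance methods with functools.lru_cache. As a behavioral aside (illustrative code, not from parsl, and not a statement of why the class was removed): such a cache keys on every positional argument, including self, so entries are per-instance and the wrapper holds references to cached instances until they are evicted.

import functools

class Codec:
    @functools.lru_cache
    def encode(self, value: int) -> bytes:
        # cached per (self, value); repeated calls on the same instance hit the cache
        return repr(value).encode()

c1, c2 = Codec(), Codec()
assert c1.encode(1) == c2.encode(1) == b"1"  # same result, but two separate cache entries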
parsl/tests/conftest.py CHANGED
@@ -1,12 +1,16 @@
 import importlib.util
+import itertools
 import logging
 import os
 import pathlib
+import time
+import types
 import signal
 import sys
 import tempfile
 import threading
 import traceback
+import typing as t
 from datetime import datetime
 from glob import glob
 from itertools import chain
@@ -157,9 +161,9 @@ def load_dfk_local_module(request, pytestconfig):
     parsl.load. It should be a Callable that returns a parsl Config object.
 
     If local_setup and/or local_teardown are callables (such as functions) in
-    the test module, they they will be invoked before/after the tests. This
-    can be used to perform more interesting DFK initialisation not possible
-    with local_config.
+    the test module, they will be invoked before/after the tests. This can
+    be used to perform more interesting DFK initialisation not possible with
+    local_config.
     """
 
     config = pytestconfig.getoption('config')[0]
@@ -290,3 +294,67 @@ def pytest_ignore_collect(path):
         return True
     else:
         return False
+
+
+def create_traceback(start: int = 0) -> t.Optional[types.TracebackType]:
+    """
+    Dynamically create a traceback.
+
+    Builds a traceback from the top of the stack (the currently executing frame) on
+    down to the root frame. Optionally, use start to build from an earlier stack
+    frame.
+
+    N.B. uses `sys._getframe`, which I only know to exist in CPython.
+    """
+    tb = None
+    for depth in itertools.count(start + 1, 1):
+        try:
+            frame = sys._getframe(depth)
+            tb = types.TracebackType(tb, frame, frame.f_lasti, frame.f_lineno)
+        except ValueError:
+            break
+    return tb
+
+
+@pytest.fixture
+def try_assert():
+    def _impl(
+        test_func: t.Callable[[], bool],
+        fail_msg: str = "",
+        timeout_ms: float = 5000,
+        attempts: int = 0,
+        check_period_ms: int = 20,
+    ):
+        tb = create_traceback(start=1)
+        timeout_s = abs(timeout_ms) / 1000.0
+        check_period_s = abs(check_period_ms) / 1000.0
+        if attempts > 0:
+            for _attempt_no in range(attempts):
+                if test_func():
+                    return
+                time.sleep(check_period_s)
+            else:
+                att_fail = (
+                    f"\n (Still failing after attempt limit [{attempts}], testing"
+                    f" every {check_period_ms}ms)"
+                )
+                exc = AssertionError(f"{str(fail_msg)}{att_fail}".strip())
+                raise exc.with_traceback(tb)
+
+        elif timeout_s > 0:
+            end = time.monotonic() + timeout_s
+            while time.monotonic() < end:
+                if test_func():
+                    return
+                time.sleep(check_period_s)
+            att_fail = (
+                f"\n (Still failing after timeout [{timeout_ms}ms], with attempts "
+                f"every {check_period_ms}ms)"
+            )
+            exc = AssertionError(f"{str(fail_msg)}{att_fail}".strip())
+            raise exc.with_traceback(tb)
+
+        else:
+            raise AssertionError("Bad assert call: no attempts or timeout period")
+
+    yield _impl
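A minimal usage sketch of the try_assert fixture added above, assuming it is collected from this conftest; the test body itself is illustrative and not part of the package:

import time

def test_condition_eventually_holds(try_assert):
    start = time.monotonic()
    # Polls the predicate every check_period_ms (default 20 ms) until it returns True,
    # or raises AssertionError carrying fail_msg once timeout_ms elapses.
    try_assert(
        lambda: time.monotonic() - start > 0.1,
        fail_msg="condition never became true",
        timeout_ms=1000,
    )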
@@ -1,28 +1,21 @@
 import pytest
 
-import parsl
 from parsl.app.app import python_app
-from parsl.tests.configs.htex_local import fresh_config
-
 from parsl.executors.high_throughput.errors import WorkerLost
 
 
-def local_setup():
+def local_config():
+    from parsl.tests.configs.htex_local import fresh_config
     config = fresh_config()
     config.executors[0].poll_period = 1
     config.executors[0].max_workers = 1
-    parsl.load(config)
-
-
-def local_teardown():
-    parsl.dfk().cleanup()
-    parsl.clear()
+    config.executors[0].heartbeat_period = 1
+    return config
 
 
 @python_app
 def kill_worker():
-    import sys
-    sys.exit(2)
+    raise SystemExit(2)
 
 
 @pytest.mark.local
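The rewritten kill_worker app drops the sys import because sys.exit(n) is documented as raising SystemExit(n), so raising the exception directly is equivalent. A quick illustrative check:

import sys

try:
    sys.exit(2)
except SystemExit as e:
    assert e.code == 2  # same exception type and code as raise SystemExit(2)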
@@ -1,17 +1,42 @@
+import threading
+import time
+
+import pytest
+
 import parsl
+from parsl.tests.configs.local_threads import fresh_config as local_config  # noqa
 
 
 @parsl.python_app
-def slow_app(delay):
-    import time
-    time.sleep(delay)
+def slow_app(evt: threading.Event):
+    evt.wait()
 
 
+@pytest.mark.local
 def test_wait_for_tasks():
-    slow_app(5)
-    slow_app(10)  # This test has a higher task ID, and runs for a longer period
-    slow_app(3)  # This test has a higher task ID, but runs for a shorter period
-    parsl.dfk().wait_for_current_tasks()
-    # the regression reported in #1606 is that wait_for_current_tasks
-    # fails due to tasks being removed from the DFK tasks dict as they
-    # complete, introduced in #1543.
+    """
+    gh#1606 reported that wait_for_current_tasks fails due to tasks being removed
+    from the DFK tasks dict as they complete; bug introduced in #1543.
+    """
+    def test_kernel(may_wait: threading.Event):
+        e1, e2 = threading.Event(), threading.Event()
+
+        # app_slow is in *middle* of internal DFK data structure
+        app_fast1, app_slow, app_fast2 = slow_app(e1), slow_app(e2), slow_app(e1)
+
+        may_wait.set()  # initiated wait in outer test
+        time.sleep(0.01)
+
+        e1.set()
+
+        while not all(f.done() for f in (app_fast1, app_fast2)):
+            time.sleep(0.01)
+
+        e2.set()
+        app_slow.result()
+
+    may_continue = threading.Event()
+    threading.Thread(target=test_kernel, daemon=True, args=(may_continue,)).start()
+
+    may_continue.wait()
+    parsl.dfk().wait_for_current_tasks()  # per sleeps, waits for all 3 tasks
@@ -1,33 +1,27 @@
-# this test is intended to ensure that only one block is launched when only
-# one app is invoked. this is a regression test.
+import threading
+
+import pytest
 
-import logging
 import parsl
 from parsl.channels import LocalChannel
 from parsl.config import Config
 from parsl.executors import HighThroughputExecutor
 from parsl.launchers import SimpleLauncher
 from parsl.providers import LocalProvider
-import pytest
-
-
-logger = logging.getLogger(__name__)
 
 
 @parsl.python_app
 def app():
     import time
-    time.sleep(45)
+    time.sleep(1)
 
 
 class OneShotLocalProvider(LocalProvider):
     def __init__(self, *args, **kwargs):
-        logger.info("OneShotLocalProvider __init__ with MRO: {}".format(type(self).mro()))
         self.recorded_submits = 0
         super().__init__(*args, **kwargs)
 
     def submit(self, *args, **kwargs):
-        logger.info("OneShotLocalProvider submit")
         self.recorded_submits += 1
         return super().submit(*args, **kwargs)
 
@@ -35,32 +29,44 @@ class OneShotLocalProvider(LocalProvider):
 
 
 @pytest.mark.local
-def test_one_block():
-
+def test_one_block(tmpd_cwd):
+    """
+    this test is intended to ensure that only one block is launched when only
+    one app is invoked. this is a regression test.
+    """
     oneshot_provider = OneShotLocalProvider(
-            channel=LocalChannel(),
-            init_blocks=0,
-            min_blocks=0,
-            max_blocks=10,
-            launcher=SimpleLauncher(),
-            )
+        channel=LocalChannel(),
+        init_blocks=0,
+        min_blocks=0,
+        max_blocks=10,
+        launcher=SimpleLauncher(),
+    )
 
     config = Config(
         executors=[
             HighThroughputExecutor(
                 label="htex_local",
+                address="127.0.0.1",
                 worker_debug=True,
                 cores_per_worker=1,
                 provider=oneshot_provider,
+                worker_logdir_root=str(tmpd_cwd)
             )
         ],
         strategy='simple',
     )
 
     parsl.load(config)
+    dfk = parsl.dfk()
+
+    def poller():
+        import time
+        while True:
+            dfk.job_status_poller.poll()
+            time.sleep(0.1)
 
-    f = app()
-    f.result()
+    threading.Thread(target=poller, daemon=True).start()
+    app().result()
     parsl.dfk().cleanup()
     parsl.clear()
 
@@ -1,80 +1,97 @@
 import logging
-import parsl
-import pytest
 import time
-from parsl import python_app
 
+import pytest
+
+import parsl
+
+from parsl import File, python_app
 from parsl.providers import LocalProvider
 from parsl.channels import LocalChannel
-# from parsl.launchers import SimpleLauncher
 from parsl.launchers import SingleNodeLauncher
-
 from parsl.config import Config
 from parsl.executors import HighThroughputExecutor
 
 logger = logging.getLogger(__name__)
 
+_max_blocks = 5
+_min_blocks = 2
+
 
 def local_config():
     return Config(
         executors=[
             HighThroughputExecutor(
-                heartbeat_period=2,
-                heartbeat_threshold=6,
-                poll_period=1,
+                heartbeat_period=1,
+                heartbeat_threshold=2,
+                poll_period=100,
                 label="htex_local",
+                address="127.0.0.1",
                 max_workers=1,
                 provider=LocalProvider(
                     channel=LocalChannel(),
                     init_blocks=0,
-                    max_blocks=5,
-                    min_blocks=2,
+                    max_blocks=_max_blocks,
+                    min_blocks=_min_blocks,
                     launcher=SingleNodeLauncher(),
                 ),
             )
         ],
-        max_idletime=5,
+        max_idletime=0.5,
         strategy='htex_auto_scale',
     )
 
 
 @python_app
-def sleeper(t):
+def waiting_app(ident: int, inputs=()):
+    import pathlib
     import time
-    time.sleep(t)
+
+    # Approximate an Event by writing to files; the test logic will poll this file
+    with open(inputs[0], "a") as f:
+        f.write(f"Ready: {ident}\n")
+
+    # Similarly, use Event approximation (file check!) by polling.
+    may_finish_file = pathlib.Path(inputs[1])
+    while not may_finish_file.exists():
+        time.sleep(0.01)
 
 
 # see issue #1885 for details of failures of this test.
 # at the time of issue #1885 this test was failing frequently
 # in CI.
 @pytest.mark.local
-def test_scale_out():
-    logger.info("start")
+def test_scale_out(tmpd_cwd, try_assert):
     dfk = parsl.dfk()
 
-    logger.info("initial asserts")
-    assert len(dfk.executors['htex_local'].connected_managers()) == 0, "Expected 0 managers at start"
+    num_managers = len(dfk.executors['htex_local'].connected_managers())
+
+    assert num_managers == 0, "Expected 0 managers at start"
     assert dfk.executors['htex_local'].outstanding == 0, "Expected 0 tasks at start"
 
-    logger.info("launching tasks")
-    fus = [sleeper(i) for i in [15 for x in range(0, 10)]]
+    ntasks = 10
+    ready_path = tmpd_cwd / "workers_ready"
+    finish_path = tmpd_cwd / "workers_may_continue"
+    ready_path.touch()
+    inputs = [File(str(ready_path)), File(str(finish_path))]
 
-    logger.info("waiting for warm up")
-    time.sleep(15)
+    futs = [waiting_app(i, inputs=inputs) for i in range(ntasks)]
 
-    logger.info("asserting 5 managers")
-    assert len(dfk.executors['htex_local'].connected_managers()) == 5, "Expected 5 managers after some time"
+    while ready_path.read_text().count("\n") < _max_blocks:
+        time.sleep(0.5)
 
-    logger.info("waiting for all futures to complete")
-    [x.result() for x in fus]
+    assert len(dfk.executors['htex_local'].connected_managers()) == _max_blocks
 
-    logger.info("asserting 0 outstanding tasks after completion")
-    assert dfk.executors['htex_local'].outstanding == 0, "Expected 0 outstanding tasks after future completion"
+    finish_path.touch()  # Approximation of Event, via files
+    [x.result() for x in futs]
 
-    logger.info("waiting a while for scale down")
-    time.sleep(25)
+    assert dfk.executors['htex_local'].outstanding == 0
 
-    logger.info("asserting 2 managers remain")
-    assert len(dfk.executors['htex_local'].connected_managers()) == 2, "Expected 2 managers when no tasks, lower bound by min_blocks"
+    def assert_kernel():
+        return len(dfk.executors['htex_local'].connected_managers()) == _min_blocks
 
-    logger.info("test passed")
+    try_assert(
+        assert_kernel,
+        fail_msg=f"Expected {_min_blocks} managers when no tasks (min_blocks)",
+        timeout_ms=15000,
+    )
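The rewritten scale-out test coordinates with its apps through sentinel files rather than in-process Events, since the apps run in separate worker processes. A minimal sketch of that file-as-event idiom, with illustrative names:

import pathlib
import tempfile
import time

def wait_for_file(path: pathlib.Path, poll_s: float = 0.01) -> None:
    # Block until another process "sets" the flag by creating the file.
    while not path.exists():
        time.sleep(poll_s)

with tempfile.TemporaryDirectory() as d:
    flag = pathlib.Path(d) / "workers_may_continue"
    flag.touch()         # setter side: create the file
    wait_for_file(flag)  # waiter side: returns because the file now exists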
parsl/version.py CHANGED
@@ -3,4 +3,4 @@
 Year.Month.Day[alpha/beta/..]
 Alphas will be numbered like this -> 2024.12.10a0
 """
-VERSION = '2023.07.10'
+VERSION = '2023.07.17'
@@ -370,7 +370,7 @@ class Manager:
         logger.critical("Exiting")
 
     @wrap_with_logs
-    def worker_watchdog(self, kill_event):
+    def worker_watchdog(self, kill_event: threading.Event):
         """Keeps workers alive.
 
         Parameters:
@@ -381,7 +381,7 @@ class Manager:
 
         logger.debug("Starting worker watchdog")
 
-        while not kill_event.is_set():
+        while not kill_event.wait(self.heartbeat_period):
             for worker_id, p in self.procs.items():
                 if not p.is_alive():
                     logger.error("Worker {} has died".format(worker_id))
@@ -409,7 +409,6 @@ class Manager:
                                     name="HTEX-Worker-{}".format(worker_id))
                     self.procs[worker_id] = p
                     logger.info("Worker {} has been restarted".format(worker_id))
-            time.sleep(self.heartbeat_period)
 
         logger.critical("Exiting")
 
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: parsl
-Version: 2023.7.10
+Version: 2023.7.17
 Summary: Simple data dependent workflows in Python
 Home-page: https://github.com/Parsl/parsl
-Download-URL: https://github.com/Parsl/parsl/archive/2023.07.10.tar.gz
+Download-URL: https://github.com/Parsl/parsl/archive/2023.07.17.tar.gz
 Author: The Parsl Team
 Author-email: parsl@googlegroups.com
 License: Apache 2.0
@@ -7,7 +7,7 @@ parsl/multiprocessing.py,sha256=uY64wcQmWt2rgylQm4lmr3HE8AxwFGeQQj4l1jKnnrY,1970
 parsl/process_loggers.py,sha256=1G3Rfrh5wuZNo2X03grG4kTYPGOxz7hHCyG6L_A3b0A,1137
 parsl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/utils.py,sha256=2QVbFSDzWaCnkxkr0qtoKY1zvjfgYQu_ngHtQWdC9zM,11598
-parsl/version.py,sha256=1CoFZpCpHfIkS-1qrwH2XKMnhnUtxyLDKU_rg7Qi5BE,131
+parsl/version.py,sha256=b-2lKO3lMbIoSzZ9GmmEztJCqUrJCqgko7R4KWQhTaA,131
 parsl/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/app/app.py,sha256=e_7h07nsHJlqF33yZwrmbUzizaSTU-9KKuHtcwQSUx0,7760
 parsl/app/bash.py,sha256=Kqkss4dhWB6Mid2aax4CswZ_1nAqolIUGJo6mbTW7cQ,5421
@@ -84,12 +84,12 @@ parsl/executors/flux/executor.py,sha256=f1xx7in-MR5l0zNZ-qf0iEMEEbN-fr2hwCRu329j
 parsl/executors/flux/flux_instance_manager.py,sha256=tTEOATClm9SwdgLeBRWPC6D55iNDuh0YxqJOw3c3eQ4,2036
 parsl/executors/high_throughput/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/executors/high_throughput/errors.py,sha256=vl69wLuVOplbKxHI9WphEGBExHWkTn5n8T9QhBXuNH0,380
-parsl/executors/high_throughput/executor.py,sha256=0mxwM65dKXSV_DnVvQKbthD2qqiFxJ0NvbErOg5dsNc,33191
+parsl/executors/high_throughput/executor.py,sha256=bBYeI-ZjNmG55a6jkNzkl5i76PC1SV-3TaG__XPEZNE,33172
 parsl/executors/high_throughput/interchange.py,sha256=c-a86vnHdysWp9JteY8R_m1URFLT9tJ6zCD6CCQQVMU,29976
 parsl/executors/high_throughput/manager_record.py,sha256=T8-JVMfDJU6SJfzJRooD0mO8AHGMXlcn3PBOM0m_vng,366
 parsl/executors/high_throughput/monitoring_info.py,sha256=3gQpwQjjNDEBz0cQqJZB6hRiwLiWwXs83zkQDmbOwxY,297
 parsl/executors/high_throughput/probe.py,sha256=lvnuf-vBv57tHvFh-J51F9sDYBES7jCgs6KYgWvmKRs,2749
-parsl/executors/high_throughput/process_worker_pool.py,sha256=ToMA-MBZurkcgn4m-H-ckE1-F9ubwCwjHHfhm-LMB5c,33011
+parsl/executors/high_throughput/process_worker_pool.py,sha256=b2xsV0QHEMbYQXhVYb0avfNsd3pdbdkQCG5d2MntTfs,32997
 parsl/executors/high_throughput/zmq_pipes.py,sha256=yTURJSHGY-n1rI5OfzJbcTa1Ji7jiY8nuisqBWgpEZw,5720
 parsl/executors/taskvine/__init__.py,sha256=CD_JMWUfpyJtmtZzYGQ8TtUHir4WtDyXIDxViBnAyC0,95
 parsl/executors/taskvine/errors.py,sha256=Zq5FQsnHqjjiT10JDVxvE0g3CwMjzy9qbIPwQUuqD_c,466
@@ -177,11 +177,11 @@ parsl/providers/torque/template.py,sha256=4qfc2gmlEhRCAD7erFDOs4prJQ43I8s4E8DSUS
 parsl/providers/torque/torque.py,sha256=O1skXXG1eOZyxuFexL--MumcWbCoFqkg03Luj-twyk4,9500
 parsl/serialize/__init__.py,sha256=-SQi-Uy4mOUFt_Ils48sk6pojgUX_eNqBxinWdTq3yY,219
 parsl/serialize/base.py,sha256=2kg7h-nQvgdBLc87qaJSOfrlMZNhjKxR81-nmMFYSMc,1164
-parsl/serialize/concretes.py,sha256=8yTaq6X1vp60mDpfkinbdqkKd7GBgWH0sTtFoCirVXE,2586
+parsl/serialize/concretes.py,sha256=eCq7xCoDxP-nR3YAQY9COG_YIg63FT_HWdK7cTm_9VM,2042
 parsl/serialize/facade.py,sha256=zVxGpgC61iE6v0lBK9xZ3TZYSTePgs3yhha6vb-pNIM,4439
 parsl/tests/__init__.py,sha256=s_zoz7Ipgykh-QTQvctdpxENrMnmpXY8oe1bJbUmpqY,204
 parsl/tests/callables_helper.py,sha256=ceP1YYsNtrZgKT6MAIvpgdccEjQ_CpFEOnZBGHKGOx0,30
-parsl/tests/conftest.py,sha256=2h9ZdJ8b3aXVUNSwmZdy5t8vOebyvb_9F5vLHN0GTY4,9845
+parsl/tests/conftest.py,sha256=r6-ZxDolwRJfi5oI0P9yQerNMJ_-9ZdFEQuDAfs7zvw,12005
 parsl/tests/test_aalst_patterns.py,sha256=fi6JHKidV7vMJLv2nnu_-Q0ngGLc89mRm8rFrGIwiUM,9615
 parsl/tests/test_callables.py,sha256=_QsdS8v2nGgOj4_X69NFHZOGUnqbOrOMCA9pCJColZw,1974
 parsl/tests/test_flux.py,sha256=st9v55o5ZajK_LQUXh1saLwFh2gpaQFGG5mzdnJMNu0,5098
@@ -312,7 +312,7 @@ parsl/tests/test_error_handling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 parsl/tests/test_error_handling/test_fail.py,sha256=xx4TGWfL7le4cQ9nvnUkrlmKQJkskhD0l_3W1xwZSEI,282
 parsl/tests/test_error_handling/test_htex_basic.py,sha256=VRP_-Ro2SYp8TqfjpG_zCBJOZWuVFFCr3E0WKN_blg8,455
 parsl/tests/test_error_handling/test_htex_missing_worker.py,sha256=Tux0Xla719eup7RdWj8LmxNH-CTscMN0NM4CPuPP1ng,967
-parsl/tests/test_error_handling/test_htex_worker_failure.py,sha256=nMDEduJkAOwqQFiT_-XmifP389NTrHwe4nlPaeqlnrc,602
+parsl/tests/test_error_handling/test_htex_worker_failure.py,sha256=CLEZzirs28h81R8j1MIsAZEEkCDwqL68iZvY6dDTT1A,558
 parsl/tests/test_error_handling/test_python_walltime.py,sha256=mQ2WE9aZQkzJcAdvVxIY19p5srSsWvJLUrFsKb-hLfY,813
 parsl/tests/test_error_handling/test_rand_fail.py,sha256=OgHDOGWwdSzdpxpQQ0RbDc1k2mLtWxEcQ6akL23EA_w,3864
 parsl/tests/test_error_handling/test_resource_spec.py,sha256=1oOJC6fnG_TuaGBfGjaucJjxwL9GIUSXc8K-e_zSyus,1185
@@ -358,7 +358,7 @@ parsl/tests/test_python_apps/test_timeout.py,sha256=P6A1z1kQR0zHda5R21fnsKyv44aE
 parsl/tests/test_python_apps/test_type5.py,sha256=kUyA1NuFu-DDXsJNNvJLZVyewZBt7QAOhcGm2DWFTQw,777
 parsl/tests/test_regression/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/tests/test_regression/test_1480.py,sha256=HNhuw7OYkBGMhN--XgKIl2JPHUj_hXlgL74oS3FqWk4,545
-parsl/tests/test_regression/test_1606_wait_for_current_tasks.py,sha256=Q0z7keqBL_E-gNt_8VnVfEQoecpY7yLG7vyeeCAVP74,518
+parsl/tests/test_regression/test_1606_wait_for_current_tasks.py,sha256=cmnMtErEosFL6sjgy1bwyheL_gevv7Fj1-Jz6h63GwM,1142
 parsl/tests/test_regression/test_1653.py,sha256=ki75gl4Sn5nm26r_6qpJOqxrN5UjTWzViVikU0-Ef24,563
 parsl/tests/test_regression/test_221.py,sha256=jOS0EVu_2sbh10eg5hnivPvhNt0my_50vQ7jQYS1Bfg,520
 parsl/tests/test_regression/test_226.py,sha256=9EumcLPJzvbD0IUD6-LmPFbf86gaA1Kj8J8J4P4XdTY,1087
@@ -369,8 +369,8 @@ parsl/tests/test_regression/test_854.py,sha256=2B4psMwiA6zhpKAZST-d_pqI3wS44BmYA
 parsl/tests/test_regression/test_97_parallelism_0.py,sha256=PIMMr1SU9E3E27YD7uDj0YsK2_6AvmE433Mpd8q4pc0,1561
 parsl/tests/test_regression/test_98.py,sha256=ZNTA-USpmH85Mt0nu3KFQ1qqmXsyHtYMZWZY0grzuYA,453
 parsl/tests/test_scaling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-parsl/tests/test_scaling/test_regression_1621.py,sha256=QA-qFy_hDuZwPJ5l9srlGrafc4m1PT4dV1IPVRZfZGU,1679
-parsl/tests/test_scaling/test_scale_down.py,sha256=3Q7JT6bz2kC118i-WEhWcvVqNNkjrdKf6U-LgWECDeA,2355
+parsl/tests/test_scaling/test_regression_1621.py,sha256=kbHTCsuskxFB_4E8Gd3t8lSGI0yI-5X9n_yIX1tN-u0,1738
+parsl/tests/test_scaling/test_scale_down.py,sha256=4iu73FB0XSBHMNvoatDyeVHEIdT-I6hrmF-XNyGuMVA,2754
 parsl/tests/test_serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/tests/test_serialization/test_2555_caching_deserializer.py,sha256=J8__b4djA5tErd8FUSXGkGcdXlW2KHbBWRbCTAnV08Q,767
 parsl/tests/test_serialization/test_basic.py,sha256=51KshqIk2RNr7S2iSkl5tZo40CJBb0h6uby8YPgOGlg,543
@@ -390,12 +390,12 @@ parsl/tests/test_threads/test_configs.py,sha256=QA9YjIMAtZ2jmkfOWqBzEfzQQcFVCDiz
 parsl/tests/test_threads/test_lazy_errors.py,sha256=nGhYfCMHFZYSy6YJ4gnAmiLl9SfYs0WVnuvj8DXQ9bw,560
 parsl/usage_tracking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 parsl/usage_tracking/usage.py,sha256=TEuAIm_U_G2ojZxvd0bbVa6gZlU61_mVRa2yJC9mGiI,7555
-parsl-2023.7.10.data/scripts/exec_parsl_function.py,sha256=kYJmpgsswNMqV2-_dOmtgZHhb9OuKMvB-UwaNO2A2z0,7759
-parsl-2023.7.10.data/scripts/parsl_coprocess.py,sha256=V5yr9c-DvZHaBPvJOUsaAxJo2s1-ZsJqO1EhauYQd2A,5499
-parsl-2023.7.10.data/scripts/process_worker_pool.py,sha256=o1Zp2j9KHIwR9OO8Z8mtZX_5gMDtnaJDE00KhVknG7g,32997
-parsl-2023.7.10.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
-parsl-2023.7.10.dist-info/METADATA,sha256=9_UwkE09_wzWPrbbo1UGgILetSqf-M-M_AMv8VieE1I,3635
-parsl-2023.7.10.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-parsl-2023.7.10.dist-info/entry_points.txt,sha256=XqnsWDYoEcLbsMcpnYGKLEnSBmaIe1YoM5YsBdJG2tI,176
-parsl-2023.7.10.dist-info/top_level.txt,sha256=PIheYoUFQtF2icLsgOykgU-Cjuwr2Oi6On2jo5RYgRM,6
-parsl-2023.7.10.dist-info/RECORD,,
+parsl-2023.7.17.data/scripts/exec_parsl_function.py,sha256=kYJmpgsswNMqV2-_dOmtgZHhb9OuKMvB-UwaNO2A2z0,7759
+parsl-2023.7.17.data/scripts/parsl_coprocess.py,sha256=V5yr9c-DvZHaBPvJOUsaAxJo2s1-ZsJqO1EhauYQd2A,5499
+parsl-2023.7.17.data/scripts/process_worker_pool.py,sha256=wWYHy0FF0NqZ2y45UJJ8Omq0ryEVWlQQxIAeJhoGjTs,32983
+parsl-2023.7.17.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
+parsl-2023.7.17.dist-info/METADATA,sha256=5ZsGpiatXGATpC5A2ATnlM46nkQoHAOJIXxolf9lqQk,3635
+parsl-2023.7.17.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+parsl-2023.7.17.dist-info/entry_points.txt,sha256=XqnsWDYoEcLbsMcpnYGKLEnSBmaIe1YoM5YsBdJG2tI,176
+parsl-2023.7.17.dist-info/top_level.txt,sha256=PIheYoUFQtF2icLsgOykgU-Cjuwr2Oi6On2jo5RYgRM,6
+parsl-2023.7.17.dist-info/RECORD,,