jobflow 0.1.17.tar.gz → 0.1.18.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {jobflow-0.1.17/src/jobflow.egg-info → jobflow-0.1.18}/PKG-INFO +27 -20
- {jobflow-0.1.17 → jobflow-0.1.18}/README.md +9 -3
- {jobflow-0.1.17 → jobflow-0.1.18}/pyproject.toml +21 -18
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/flow.py +4 -5
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/job.py +45 -11
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/maker.py +1 -3
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/reference.py +1 -1
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/store.py +5 -4
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/managers/fireworks.py +15 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/managers/local.py +5 -3
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/__init__.py +1 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/uid.py +2 -1
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/uuid.py +1 -0
- {jobflow-0.1.17 → jobflow-0.1.18/src/jobflow.egg-info}/PKG-INFO +27 -20
- jobflow-0.1.18/src/jobflow.egg-info/requires.txt +52 -0
- jobflow-0.1.17/src/jobflow.egg-info/requires.txt +0 -49
- {jobflow-0.1.17 → jobflow-0.1.18}/LICENSE +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/setup.cfg +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/__init__.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/_version.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/__init__.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/schemas.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/core/state.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/managers/__init__.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/py.typed +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/settings.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/dict_mods.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/enum.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/find.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/graph.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow/utils/log.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow.egg-info/SOURCES.txt +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow.egg-info/dependency_links.txt +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/src/jobflow.egg-info/top_level.txt +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/tests/test_settings.py +0 -0
- {jobflow-0.1.17 → jobflow-0.1.18}/tests/test_version.py +0 -0
--- jobflow-0.1.17/src/jobflow.egg-info/PKG-INFO
+++ jobflow-0.1.18/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jobflow
-Version: 0.1.17
+Version: 0.1.18
 Summary: jobflow is a library for writing computational workflows
 Author-email: Alex Ganose <a.ganose@imperial.ac.uk>
 License: modified BSD
@@ -34,19 +34,20 @@ Requires-Dist: pydash
 Provides-Extra: ulid
 Requires-Dist: python-ulid; extra == "ulid"
 Provides-Extra: docs
-Requires-Dist: autodoc_pydantic==2.0.1; extra == "docs"
-Requires-Dist: furo==2023.9.10; extra == "docs"
-Requires-Dist: ipython==8.20.0; extra == "docs"
-Requires-Dist: myst_parser==2.0.0; extra == "docs"
-Requires-Dist: nbsphinx==0.9.3; extra == "docs"
+Requires-Dist: autodoc_pydantic==2.1.0; extra == "docs"
+Requires-Dist: furo==2024.5.6; extra == "docs"
+Requires-Dist: ipython==8.26.0; extra == "docs"
+Requires-Dist: myst_parser==3.0.1; extra == "docs"
+Requires-Dist: nbsphinx==0.9.4; extra == "docs"
 Requires-Dist: sphinx-copybutton==0.5.2; extra == "docs"
-Requires-Dist: sphinx==7.2.6; extra == "docs"
+Requires-Dist: sphinx==7.4.4; extra == "docs"
 Provides-Extra: dev
 Requires-Dist: pre-commit>=2.12.1; extra == "dev"
+Requires-Dist: typing_extensions; python_version < "3.11" and extra == "dev"
 Provides-Extra: tests
 Requires-Dist: moto==4.2.13; extra == "tests"
-Requires-Dist: pytest-cov==4.1.0; extra == "tests"
-Requires-Dist: pytest==7.4.4; extra == "tests"
+Requires-Dist: pytest-cov==5.0.0; extra == "tests"
+Requires-Dist: pytest==8.2.2; extra == "tests"
 Provides-Extra: vis
 Requires-Dist: matplotlib; extra == "vis"
 Requires-Dist: pydot; extra == "vis"
@@ -55,19 +56,21 @@ Requires-Dist: FireWorks; extra == "fireworks"
 Provides-Extra: strict
 Requires-Dist: FireWorks==2.0.3; extra == "strict"
 Requires-Dist: PyYAML==6.0.1; extra == "strict"
-Requires-Dist: maggma==0.61.0; extra == "strict"
-Requires-Dist: matplotlib==3.8.2; extra == "strict"
-Requires-Dist: monty==2023.11.3; extra == "strict"
+Requires-Dist: maggma==0.69.0; extra == "strict"
+Requires-Dist: matplotlib==3.9.1; extra == "strict"
+Requires-Dist: monty==2024.7.12; extra == "strict"
 Requires-Dist: moto==4.2.13; extra == "strict"
 Requires-Dist: networkx==3.2.1; extra == "strict"
-Requires-Dist: pydantic-settings==2.1.0; extra == "strict"
-Requires-Dist: pydantic==2.5.3; extra == "strict"
-Requires-Dist: pydash==7.0.6; extra == "strict"
+Requires-Dist: pydantic-settings==2.3.4; extra == "strict"
+Requires-Dist: pydantic==2.8.2; extra == "strict"
+Requires-Dist: pydash==8.0.1; extra == "strict"
 Requires-Dist: pydot==2.0.0; extra == "strict"
-Requires-Dist: typing-extensions==4.9.0; extra == "strict"
-Requires-Dist: python-ulid==2.2.0; extra == "strict"
+Requires-Dist: python-ulid==2.7.0; extra == "strict"
+Requires-Dist: typing-extensions==4.12.2; extra == "strict"
 
-
+<div align="center">
+
+# 
 
 [](https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting)
 [](https://codecov.io/gh/materialsproject/jobflow/)
@@ -75,11 +78,14 @@ Requires-Dist: python-ulid==2.2.0; extra == "strict"
 
 [](https://doi.org/10.21105/joss.05995)
 
+</div>
+
 [Documentation](https://materialsproject.github.io/jobflow/) | [PyPI](https://pypi.org/project/jobflow/) | [GitHub](https://github.com/materialsproject/jobflow) | [Paper](https://doi.org/10.21105/joss.05995)
 
 Jobflow is a free, open-source library for writing and executing workflows. Complex
 workflows can be defined using simple python functions and executed locally or on
-arbitrary computing resources using the [FireWorks][fireworks] workflow manager.
+arbitrary computing resources using the [jobflow-remote][jfr] or [FireWorks][fireworks]
+workflow managers.
 
 Some features that distinguish jobflow are dynamic workflows, easy compositing and
 connecting of workflows, and the ability to store workflow outputs across multiple
@@ -98,7 +104,7 @@ Some of its features include:
   way to build complex workflows.
 - Integration with multiple databases (MongoDB, S3, GridFS, and more) through the
   [Maggma][maggma] package.
-- Support for the [FireWorks][fireworks] workflow management system, allowing workflow
+- Support for the [jobflow-remote][jfr] and [FireWorks][fireworks] workflow management systems, allowing workflow
   execution on multicore machines or through a queue, on a single machine or multiple
   machines.
 - Support for dynamic workflows — workflows that modify themselves or create new ones
@@ -192,6 +198,7 @@ Jobflow was designed by Alex Ganose, Anubhav Jain, Gian-Marco Rignanese, David W
 
 [maggma]: https://materialsproject.github.io/maggma/
 [fireworks]: https://materialsproject.github.io/fireworks/
+[jfr]: https://matgenix.github.io/jobflow-remote
 [help-forum]: https://matsci.org/c/fireworks
 [issues]: https://github.com/materialsproject/jobflow/issues
 [changelog]: https://materialsproject.github.io/jobflow/changelog.html
--- jobflow-0.1.17/README.md
+++ jobflow-0.1.18/README.md
@@ -1,4 +1,6 @@
-
+<div align="center">
+
+# 
 
 [](https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting)
 [](https://codecov.io/gh/materialsproject/jobflow/)
@@ -6,11 +8,14 @@
 
 [](https://doi.org/10.21105/joss.05995)
 
+</div>
+
 [Documentation](https://materialsproject.github.io/jobflow/) | [PyPI](https://pypi.org/project/jobflow/) | [GitHub](https://github.com/materialsproject/jobflow) | [Paper](https://doi.org/10.21105/joss.05995)
 
 Jobflow is a free, open-source library for writing and executing workflows. Complex
 workflows can be defined using simple python functions and executed locally or on
-arbitrary computing resources using the [FireWorks][fireworks] workflow manager.
+arbitrary computing resources using the [jobflow-remote][jfr] or [FireWorks][fireworks]
+workflow managers.
 
 Some features that distinguish jobflow are dynamic workflows, easy compositing and
 connecting of workflows, and the ability to store workflow outputs across multiple
@@ -29,7 +34,7 @@ Some of its features include:
   way to build complex workflows.
 - Integration with multiple databases (MongoDB, S3, GridFS, and more) through the
   [Maggma][maggma] package.
-- Support for the [FireWorks][fireworks] workflow management system, allowing workflow
+- Support for the [jobflow-remote][jfr] and [FireWorks][fireworks] workflow management systems, allowing workflow
   execution on multicore machines or through a queue, on a single machine or multiple
   machines.
 - Support for dynamic workflows — workflows that modify themselves or create new ones
@@ -123,6 +128,7 @@ Jobflow was designed by Alex Ganose, Anubhav Jain, Gian-Marco Rignanese, David W
 
 [maggma]: https://materialsproject.github.io/maggma/
 [fireworks]: https://materialsproject.github.io/fireworks/
+[jfr]: https://matgenix.github.io/jobflow-remote
 [help-forum]: https://matsci.org/c/fireworks
 [issues]: https://github.com/materialsproject/jobflow/issues
 [changelog]: https://materialsproject.github.io/jobflow/changelog.html
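The README above describes chaining plain Python functions into a `Flow` and executing it with a workflow manager. A minimal sketch of that pattern using the bundled local manager (the `add` function and its values are illustrative, not part of this release):

```python
from jobflow import Flow, job
from jobflow.managers.local import run_locally


@job
def add(a, b):
    """Toy job: the return value becomes the job's output."""
    return a + b


# chain two jobs by passing the first job's output reference into the second
first = add(1, 2)
second = add(first.output, 3)
flow = Flow([first, second], name="add flow")

# run in-process; responses are keyed as {uuid: {index: Response}}
responses = run_locally(flow)
print(responses[second.uuid][1].output)  # 6
```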
--- jobflow-0.1.17/pyproject.toml
+++ jobflow-0.1.18/pyproject.toml
@@ -38,32 +38,32 @@ dependencies = [
 [project.optional-dependencies]
 ulid = ["python-ulid"]
 docs = [
-    "autodoc_pydantic==2.0.1",
-    "furo==2023.9.10",
-    "ipython==8.20.0",
-    "myst_parser==2.0.0",
-    "nbsphinx==0.9.3",
+    "autodoc_pydantic==2.1.0",
+    "furo==2024.5.6",
+    "ipython==8.26.0",
+    "myst_parser==3.0.1",
+    "nbsphinx==0.9.4",
     "sphinx-copybutton==0.5.2",
-    "sphinx==7.2.6",
+    "sphinx==7.4.4",
 ]
-dev = ["pre-commit>=2.12.1"]
-tests = ["moto==4.2.13", "pytest-cov==4.1.0", "pytest==7.4.4"]
+dev = ["pre-commit>=2.12.1", "typing_extensions; python_version < '3.11'"]
+tests = ["moto==4.2.13", "pytest-cov==5.0.0", "pytest==8.2.2"]
 vis = ["matplotlib", "pydot"]
 fireworks = ["FireWorks"]
 strict = [
     "FireWorks==2.0.3",
     "PyYAML==6.0.1",
-    "maggma==0.61.0",
-    "matplotlib==3.8.2",
-    "monty==2023.11.3",
+    "maggma==0.69.0",
+    "matplotlib==3.9.1",
+    "monty==2024.7.12",
     "moto==4.2.13",
     "networkx==3.2.1",
-    "pydantic-settings==2.1.0",
-    "pydantic==2.5.3",
-    "pydash==7.0.6",
+    "pydantic-settings==2.3.4",
+    "pydantic==2.8.2",
+    "pydash==8.0.1",
     "pydot==2.0.0",
-    "typing-extensions==4.9.0",
-    "python-ulid==2.2.0",
+    "python-ulid==2.7.0",
+    "typing-extensions==4.12.2",
 ]
 
 [project.urls]
@@ -121,7 +121,8 @@ exclude_lines = [
 
 [tool.ruff]
 target-version = "py39"
-
+
+[tool.ruff.lint]
 select = [
     "B",  # flake8-bugbear
     "C4",  # flake8-comprehensions
@@ -159,6 +160,7 @@ ignore = [
     "DTZ005",
     "FBT001",
     "FBT002",
+    "ISC001",
     "PLR0911",  # too-many-return-statements
     "PLR0912",  # too-many-branches
     "PLR0913",  # too-many-arguments
@@ -169,8 +171,9 @@ ignore = [
 ]
 pydocstyle.convention = "numpy"
 isort.known-first-party = ["jobflow"]
+ignore-init-module-imports = true
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 # F401: unused import
 "__init__.py" = ["F401"]
 # D: pydocstyle
--- jobflow-0.1.17/src/jobflow/core/flow.py
+++ jobflow-0.1.18/src/jobflow/core/flow.py
@@ -158,9 +158,8 @@ class Flow(MSONable):
         self, idx: int | slice, value: Flow | Job | Sequence[Flow | Job]
     ) -> None:
         """Set the job(s) or subflow(s) at the given index/slice."""
-        if (
-            not isinstance(value, (Flow, jobflow.Job, tuple, list))
-            or isinstance(value, (tuple, list))
+        if not isinstance(value, (Flow, jobflow.Job, tuple, list)) or (
+            isinstance(value, (tuple, list))
             and not all(isinstance(v, (Flow, jobflow.Job)) for v in value)
         ):
             raise TypeError(
@@ -583,7 +582,7 @@ class Flow(MSONable):
             dict_mod=dict_mod,
         )
 
-    def append_name(self, append_str: str, prepend: bool = False):
+    def append_name(self, append_str: str, prepend: bool = False, dynamic: bool = True):
         """
         Append a string to the name of the flow and all jobs contained in it.
 
@@ -600,7 +599,7 @@ class Flow(MSONable):
             self.name += append_str
 
         for job in self:
-            job.append_name(append_str, prepend=prepend)
+            job.append_name(append_str, prepend=prepend, dynamic=dynamic)
 
     def update_metadata(
         self,
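The tightened `__setitem__` check above governs what can be assigned into a flow by index or slice. A short sketch of the behaviour it enforces (the `add` job is a placeholder):

```python
from jobflow import Flow, job


@job
def add(a, b):
    return a + b


flow = Flow([add(1, 2), add(3, 4)], name="sums")

# a Job, a Flow, or a list/tuple of them may be assigned at an index or slice
flow[1] = add(5, 6)

# anything else now fails the combined isinstance check with a TypeError
try:
    flow[0] = "not a job"
except TypeError as exc:
    print(exc)
```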
--- jobflow-0.1.17/src/jobflow/core/job.py
+++ jobflow-0.1.18/src/jobflow/core/job.py
@@ -6,14 +6,17 @@ import logging
 import typing
 import warnings
 from dataclasses import dataclass, field
+from typing import cast, overload
 
 from monty.json import MSONable, jsanitize
+from typing_extensions import Self
 
 from jobflow.core.reference import OnMissing, OutputReference
 from jobflow.utils.uid import suid
 
 if typing.TYPE_CHECKING:
     from collections.abc import Hashable, Sequence
+    from pathlib import Path
     from typing import Any, Callable
 
     from networkx import DiGraph
@@ -65,7 +68,19 @@ class JobConfig(MSONable):
     response_manager_config: dict = field(default_factory=dict)
 
 
-def job(method: Callable = None, **job_kwargs):
+@overload
+def job(method: Callable = None) -> Callable[..., Job]:
+    pass
+
+
+@overload
+def job(method: Callable = None, **job_kwargs) -> Callable[..., Callable[..., Job]]:
+    pass
+
+
+def job(
+    method: Callable = None, **job_kwargs
+) -> Callable[..., Job] | Callable[..., Callable[..., Job]]:
     """
     Wrap a function to produce a :obj:`Job`.
 
@@ -313,6 +328,7 @@ class Job(MSONable):
         hosts: list[str] = None,
         metadata_updates: list[dict[str, Any]] = None,
         config_updates: list[dict[str, Any]] = None,
+        name_updates: list[dict[str, Any]] = None,
         **kwargs,
     ):
         from copy import deepcopy
@@ -337,6 +353,7 @@ class Job(MSONable):
         self.config = config
         self.hosts = hosts or []
         self.metadata_updates = metadata_updates or []
+        self.name_updates = name_updates or []
         self.config_updates = config_updates or []
         self._kwargs = kwargs
 
@@ -526,7 +543,7 @@ class Job(MSONable):
         self.uuid = uuid
         self.output = self.output.set_uuid(uuid)
 
-    def run(self, store: jobflow.JobStore) -> Response:
+    def run(self, store: jobflow.JobStore, job_dir: Path = None) -> Response:
         """
         Run the job.
 
@@ -581,7 +598,9 @@ class Job(MSONable):
             function = types.MethodType(function, bound)
 
         response = function(*self.function_args, **self.function_kwargs)
-        response = Response.from_job_returns(response, self.output_schema)
+        response = Response.from_job_returns(
+            response, self.output_schema, job_dir=job_dir
+        )
 
         if response.replace is not None:
             response.replace = prepare_replace(response.replace, self)
@@ -604,6 +623,8 @@ class Job(MSONable):
                 new_jobs.update_metadata(**metadata_update, dynamic=True)
             for config_update in self.config_updates:
                 new_jobs.update_config(**config_update, dynamic=True)
+            for name_update in self.name_updates:
+                new_jobs.append_name(**name_update, dynamic=True)
 
         if self.config.response_manager_config:
             passed_config = self.config.response_manager_config
@@ -872,7 +893,7 @@ class Job(MSONable):
             dict_mod=dict_mod,
         )
 
-    def append_name(self, append_str: str, prepend: bool = False):
+    def append_name(self, append_str: str, prepend: bool = False, dynamic: bool = True):
         """
         Append a string to the name of the job.
 
@@ -882,12 +903,18 @@ class Job(MSONable):
             A string to append.
         prepend
             Prepend the name rather than appending it.
+        dynamic
+            The updates will be propagated to Jobs/Flows dynamically generated at
+            runtime.
         """
         if prepend:
             self.name = append_str + self.name
         else:
             self.name += append_str
 
+        if dynamic:
+            self.name_updates.append({"append_str": append_str, "prepend": prepend})
+
     def update_metadata(
         self,
         update: dict[str, Any],
@@ -1134,8 +1161,8 @@ class Job(MSONable):
         prepend
             Insert the UUIDs at the beginning of the list rather than extending it.
         """
-        if
-            hosts_uuids = [hosts_uuids]
+        if isinstance(hosts_uuids, str):
+            hosts_uuids = [hosts_uuids]
         if prepend:
             self.hosts[0:0] = hosts_uuids
         else:
@@ -1170,6 +1197,8 @@ class Response(typing.Generic[T]):
         Stop any children of the current flow.
     stop_jobflow
         Stop executing all remaining jobs.
+    job_dir
+        The directory where the job was run.
     """
 
     output: T = None
@@ -1179,13 +1208,15 @@ class Response(typing.Generic[T]):
     stored_data: dict[Hashable, Any] = None
     stop_children: bool = False
     stop_jobflow: bool = False
+    job_dir: str | Path = None
 
     @classmethod
     def from_job_returns(
         cls,
         job_returns: Any | None,
         output_schema: type[BaseModel] = None,
-    ) -> Response:
+        job_dir: str | Path = None,
+    ) -> Self:
         """
         Generate a :obj:`Response` from the outputs of a :obj:`Job`.
 
@@ -1199,6 +1230,8 @@ class Response(typing.Generic[T]):
         output_schema
             A pydantic model associated with the job. Used to enforce a schema for the
             outputs.
+        job_dir
+            The directory where the job was run.
 
         Raises
         ------
@@ -1215,17 +1248,18 @@ class Response(typing.Generic[T]):
             # only apply output schema if there is no replace.
             job_returns.output = apply_schema(job_returns.output, output_schema)
 
-            return job_returns
+            job_returns.job_dir = job_dir
+            return cast(Self, job_returns)
 
         if isinstance(job_returns, (list, tuple)):
             # check that a Response object is not given as one of many outputs
-            for
-                if isinstance(
+            for resp in job_returns:
+                if isinstance(resp, Response):
                     raise ValueError(
                         "Response cannot be returned in combination with other outputs."
                     )
 
-        return cls(output=apply_schema(job_returns, output_schema))
+        return cls(output=apply_schema(job_returns, output_schema), job_dir=job_dir)
 
 
 def apply_schema(output: Any, schema: type[BaseModel] | None):
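The new `dynamic` flag on `append_name`, together with the `name_updates` list replayed onto dynamically generated jobs in `Job.run` above, mirrors the existing `update_metadata`/`update_config` propagation. A hedged sketch of how it might be used (the `relax` job is illustrative):

```python
from jobflow import Flow, job


@job
def relax(structure):
    # placeholder body; a real job could return Response(replace=...) with new jobs
    return structure


relax_job = relax("initial structure")
flow = Flow([relax_job], name="relax")

# with dynamic=True (the default) the suffix is also stored in name_updates and
# replayed onto any jobs this job generates at runtime
flow.append_name(" - MP settings")

# with dynamic=False only the names of the jobs that already exist change
flow.append_name(" (v2)", dynamic=False)
print(relax_job.name)  # "relax - MP settings (v2)"
```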
--- jobflow-0.1.17/src/jobflow/core/maker.py
+++ jobflow-0.1.18/src/jobflow/core/maker.py
@@ -269,9 +269,7 @@ def recursive_call(
             return False
         if name_filter is not None and name_filter not in nested_obj.name:
             return False
-        if class_filter is not None and not isinstance(nested_obj, class_filter):
-            return False
-        return True
+        return class_filter is None or isinstance(nested_obj, class_filter)
 
     d = obj.as_dict()
 
--- jobflow-0.1.17/src/jobflow/core/store.py
+++ jobflow-0.1.18/src/jobflow/core/store.py
@@ -17,6 +17,7 @@ if typing.TYPE_CHECKING:
     from typing import Any, Optional, Union
 
     from maggma.core import Sort
+    from typing_extensions import Self
 
     from jobflow.core.schemas import JobStoreDocument
 
@@ -545,9 +546,9 @@ class JobStore(Store):
         )
 
     @classmethod
-    def from_file(cls, db_file: str | Path, **kwargs):
+    def from_file(cls, db_file: str | Path, **kwargs) -> Self:
         """
-        Create an JobStore from a database file.
+        Create a JobStore from a database file.
 
         Two options are supported for the database file. The file should be in json or
         yaml format.
@@ -555,7 +556,7 @@ class JobStore(Store):
         The simplest format is a monty dumped version of the store, generated using:
 
         >>> from monty.serialization import dumpfn
-        >>> dumpfn("job_store.yaml", job_store)
+        >>> dumpfn(job_store, "job_store.yaml")
 
         Alternatively, the file can contain the keys docs_store, additional_stores and
         any other keyword arguments supported by the :obj:`JobStore` constructor. The
@@ -605,7 +606,7 @@ class JobStore(Store):
         return cls.from_dict_spec(store_info, **kwargs)
 
     @classmethod
-    def from_dict_spec(cls, spec: dict, **kwargs):
+    def from_dict_spec(cls, spec: dict, **kwargs) -> Self:
         """
         Create an JobStore from a dict specification.
 
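The corrected docstring above fixes the `dumpfn` argument order used to serialise a store. A sketch of round-tripping a `JobStore` through a file under that pattern (the in-memory docs store is a stand-in for a real MongoDB-backed store):

```python
from maggma.stores import MemoryStore
from monty.serialization import dumpfn

from jobflow import JobStore

# a JobStore wraps a maggma docs store; MemoryStore stands in for MongoStore here
job_store = JobStore(MemoryStore())

# serialise it with monty, using the argument order from the fixed docstring
dumpfn(job_store, "job_store.yaml")

# and rebuild it; from_file (and from_dict_spec) now annotate their return as Self
restored = JobStore.from_file("job_store.yaml")
print(type(restored).__name__)  # JobStore
```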
--- jobflow-0.1.17/src/jobflow/managers/fireworks.py
+++ jobflow-0.1.18/src/jobflow/managers/fireworks.py
@@ -5,6 +5,8 @@ from __future__ import annotations
 import typing
 
 from fireworks import FiretaskBase, Firework, FWAction, Workflow, explicit_serialize
+from fireworks.utilities.fw_serializers import recursive_serialize, serialize_fw
+from monty.json import jsanitize
 
 if typing.TYPE_CHECKING:
     from collections.abc import Sequence
@@ -197,3 +199,16 @@ class JobFiretask(FiretaskBase):
             defuse_workflow=response.stop_jobflow,
             defuse_children=response.stop_children,
         )
+
+    @serialize_fw
+    @recursive_serialize
+    def to_dict(self) -> dict:
+        """
+        Serialize version of the FireTask.
+
+        Overrides the original method to explicitly jsanitize the Job
+        to handle cases not properly handled by fireworks, like a Callable.
+        """
+        d = dict(self)
+        d["job"] = jsanitize(d["job"].as_dict())
+        return d
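The `to_dict` override above jsanitizes the wrapped `Job` whenever a flow is exported to FireWorks. A sketch of that export path, assuming a configured LaunchPad is available on the machine:

```python
from fireworks import LaunchPad

from jobflow import Flow, job
from jobflow.managers.fireworks import flow_to_workflow


@job
def add(a, b):
    return a + b


flow = Flow([add(1, 2)], name="add flow")

# each Job becomes a Firework wrapping a JobFiretask; serialising that Firework
# goes through the to_dict override, which jsanitizes the stored job
wf = flow_to_workflow(flow)

# submitting requires a configured LaunchPad (assumed to exist here)
lpad = LaunchPad.auto_load()
lpad.add_wf(wf)
```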
--- jobflow-0.1.17/src/jobflow/managers/local.py
+++ jobflow-0.1.18/src/jobflow/managers/local.py
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
 def run_locally(
     flow: jobflow.Flow | jobflow.Job | list[jobflow.Job],
     log: bool = True,
-    store: jobflow.JobStore = None,
+    store: jobflow.JobStore | None = None,
     create_folders: bool = False,
     root_dir: str | Path | None = None,
     ensure_success: bool = False,
@@ -58,7 +58,7 @@ def run_locally(
         The responses of the jobs, as a dict of ``{uuid: {index: response}}``.
     """
     from collections import defaultdict
-    from datetime import datetime
+    from datetime import datetime, timezone
     from pathlib import Path
     from random import randint
 
@@ -152,7 +152,7 @@ def run_locally(
 
     def _get_job_dir():
         if create_folders:
-            time_now = datetime.utcnow().strftime(SETTINGS.DIRECTORY_FORMAT)
+            time_now = datetime.now(tz=timezone.utc).strftime(SETTINGS.DIRECTORY_FORMAT)
             job_dir = root_dir / f"job_{time_now}-{randint(10000, 99999)}"
             job_dir.mkdir()
             return job_dir
@@ -165,6 +165,8 @@ def run_locally(
         with cd(job_dir):
             response, jobflow_stopped = _run_job(job, parents)
 
+        if response is not None:
+            response.job_dir = job_dir
         encountered_bad_response = encountered_bad_response or response is None
         if jobflow_stopped:
             return False
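With the changes above, `run_locally` now attaches the execution directory to each `Response` via `response.job_dir`. A small sketch (folder names follow `SETTINGS.DIRECTORY_FORMAT`, as in the code above):

```python
from jobflow import Flow, job
from jobflow.managers.local import run_locally


@job
def add(a, b):
    return a + b


add_job = add(1, 2)

# create_folders makes a job_<timestamp>-<random> directory per job under root_dir
responses = run_locally(Flow([add_job]), create_folders=True, root_dir=".")

# each Response now records where its job ran
response = responses[add_job.uuid][1]
print(response.output, response.job_dir)
```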
--- jobflow-0.1.17/src/jobflow/utils/uid.py
+++ jobflow-0.1.18/src/jobflow/utils/uid.py
@@ -1,4 +1,5 @@
 """Tools for generating UUIDs."""
+
 from __future__ import annotations
 
 from uuid import UUID
@@ -11,7 +12,7 @@ except ImportError:  # pragma: no cover
         "Install it with `pip install jobflow[ulid]` or `pip install python-ulid`."
     )
 
-    class ULID:  # type: ignore
+    class ULID:  # type: ignore[no-redef]
         """Fake ULID class for raising import error."""
 
         def __init__(self, *args, **kwargs):
--- jobflow-0.1.17/PKG-INFO
+++ jobflow-0.1.18/src/jobflow.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jobflow
-Version: 0.1.17
+Version: 0.1.18
 Summary: jobflow is a library for writing computational workflows
 Author-email: Alex Ganose <a.ganose@imperial.ac.uk>
 License: modified BSD
@@ -34,19 +34,20 @@ Requires-Dist: pydash
 Provides-Extra: ulid
 Requires-Dist: python-ulid; extra == "ulid"
 Provides-Extra: docs
-Requires-Dist: autodoc_pydantic==2.0.1; extra == "docs"
-Requires-Dist: furo==2023.9.10; extra == "docs"
-Requires-Dist: ipython==8.20.0; extra == "docs"
-Requires-Dist: myst_parser==2.0.0; extra == "docs"
-Requires-Dist: nbsphinx==0.9.3; extra == "docs"
+Requires-Dist: autodoc_pydantic==2.1.0; extra == "docs"
+Requires-Dist: furo==2024.5.6; extra == "docs"
+Requires-Dist: ipython==8.26.0; extra == "docs"
+Requires-Dist: myst_parser==3.0.1; extra == "docs"
+Requires-Dist: nbsphinx==0.9.4; extra == "docs"
 Requires-Dist: sphinx-copybutton==0.5.2; extra == "docs"
-Requires-Dist: sphinx==7.2.6; extra == "docs"
+Requires-Dist: sphinx==7.4.4; extra == "docs"
 Provides-Extra: dev
 Requires-Dist: pre-commit>=2.12.1; extra == "dev"
+Requires-Dist: typing_extensions; python_version < "3.11" and extra == "dev"
 Provides-Extra: tests
 Requires-Dist: moto==4.2.13; extra == "tests"
-Requires-Dist: pytest-cov==4.1.0; extra == "tests"
-Requires-Dist: pytest==7.4.4; extra == "tests"
+Requires-Dist: pytest-cov==5.0.0; extra == "tests"
+Requires-Dist: pytest==8.2.2; extra == "tests"
 Provides-Extra: vis
 Requires-Dist: matplotlib; extra == "vis"
 Requires-Dist: pydot; extra == "vis"
@@ -55,19 +56,21 @@ Requires-Dist: FireWorks; extra == "fireworks"
 Provides-Extra: strict
 Requires-Dist: FireWorks==2.0.3; extra == "strict"
 Requires-Dist: PyYAML==6.0.1; extra == "strict"
-Requires-Dist: maggma==0.61.0; extra == "strict"
-Requires-Dist: matplotlib==3.8.2; extra == "strict"
-Requires-Dist: monty==2023.11.3; extra == "strict"
+Requires-Dist: maggma==0.69.0; extra == "strict"
+Requires-Dist: matplotlib==3.9.1; extra == "strict"
+Requires-Dist: monty==2024.7.12; extra == "strict"
 Requires-Dist: moto==4.2.13; extra == "strict"
 Requires-Dist: networkx==3.2.1; extra == "strict"
-Requires-Dist: pydantic-settings==2.1.0; extra == "strict"
-Requires-Dist: pydantic==2.5.3; extra == "strict"
-Requires-Dist: pydash==7.0.6; extra == "strict"
+Requires-Dist: pydantic-settings==2.3.4; extra == "strict"
+Requires-Dist: pydantic==2.8.2; extra == "strict"
+Requires-Dist: pydash==8.0.1; extra == "strict"
 Requires-Dist: pydot==2.0.0; extra == "strict"
-Requires-Dist: typing-extensions==4.9.0; extra == "strict"
-Requires-Dist: python-ulid==2.2.0; extra == "strict"
+Requires-Dist: python-ulid==2.7.0; extra == "strict"
+Requires-Dist: typing-extensions==4.12.2; extra == "strict"
 
-
+<div align="center">
+
+# 
 
 [](https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting)
 [](https://codecov.io/gh/materialsproject/jobflow/)
@@ -75,11 +78,14 @@ Requires-Dist: python-ulid==2.2.0; extra == "strict"
 
 [](https://doi.org/10.21105/joss.05995)
 
+</div>
+
 [Documentation](https://materialsproject.github.io/jobflow/) | [PyPI](https://pypi.org/project/jobflow/) | [GitHub](https://github.com/materialsproject/jobflow) | [Paper](https://doi.org/10.21105/joss.05995)
 
 Jobflow is a free, open-source library for writing and executing workflows. Complex
 workflows can be defined using simple python functions and executed locally or on
-arbitrary computing resources using the [FireWorks][fireworks] workflow manager.
+arbitrary computing resources using the [jobflow-remote][jfr] or [FireWorks][fireworks]
+workflow managers.
 
 Some features that distinguish jobflow are dynamic workflows, easy compositing and
 connecting of workflows, and the ability to store workflow outputs across multiple
@@ -98,7 +104,7 @@ Some of its features include:
   way to build complex workflows.
 - Integration with multiple databases (MongoDB, S3, GridFS, and more) through the
   [Maggma][maggma] package.
-- Support for the [FireWorks][fireworks] workflow management system, allowing workflow
+- Support for the [jobflow-remote][jfr] and [FireWorks][fireworks] workflow management systems, allowing workflow
   execution on multicore machines or through a queue, on a single machine or multiple
   machines.
 - Support for dynamic workflows — workflows that modify themselves or create new ones
@@ -192,6 +198,7 @@ Jobflow was designed by Alex Ganose, Anubhav Jain, Gian-Marco Rignanese, David W
 
 [maggma]: https://materialsproject.github.io/maggma/
 [fireworks]: https://materialsproject.github.io/fireworks/
+[jfr]: https://matgenix.github.io/jobflow-remote
 [help-forum]: https://matsci.org/c/fireworks
 [issues]: https://github.com/materialsproject/jobflow/issues
 [changelog]: https://materialsproject.github.io/jobflow/changelog.html
--- /dev/null
+++ jobflow-0.1.18/src/jobflow.egg-info/requires.txt
@@ -0,0 +1,52 @@
+PyYAML
+maggma>=0.57.0
+monty>=2023.9.25
+networkx
+pydantic-settings>=2.0.3
+pydantic>=2.0.1
+pydash
+
+[dev]
+pre-commit>=2.12.1
+
+[dev:python_version < "3.11"]
+typing_extensions
+
+[docs]
+autodoc_pydantic==2.1.0
+furo==2024.5.6
+ipython==8.26.0
+myst_parser==3.0.1
+nbsphinx==0.9.4
+sphinx-copybutton==0.5.2
+sphinx==7.4.4
+
+[fireworks]
+FireWorks
+
+[strict]
+FireWorks==2.0.3
+PyYAML==6.0.1
+maggma==0.69.0
+matplotlib==3.9.1
+monty==2024.7.12
+moto==4.2.13
+networkx==3.2.1
+pydantic-settings==2.3.4
+pydantic==2.8.2
+pydash==8.0.1
+pydot==2.0.0
+python-ulid==2.7.0
+typing-extensions==4.12.2
+
+[tests]
+moto==4.2.13
+pytest-cov==5.0.0
+pytest==8.2.2
+
+[ulid]
+python-ulid
+
+[vis]
+matplotlib
+pydot
--- jobflow-0.1.17/src/jobflow.egg-info/requires.txt
+++ /dev/null
@@ -1,49 +0,0 @@
-PyYAML
-maggma>=0.57.0
-monty>=2023.9.25
-networkx
-pydantic-settings>=2.0.3
-pydantic>=2.0.1
-pydash
-
-[dev]
-pre-commit>=2.12.1
-
-[docs]
-autodoc_pydantic==2.0.1
-furo==2023.9.10
-ipython==8.20.0
-myst_parser==2.0.0
-nbsphinx==0.9.3
-sphinx-copybutton==0.5.2
-sphinx==7.2.6
-
-[fireworks]
-FireWorks
-
-[strict]
-FireWorks==2.0.3
-PyYAML==6.0.1
-maggma==0.61.0
-matplotlib==3.8.2
-monty==2023.11.3
-moto==4.2.13
-networkx==3.2.1
-pydantic-settings==2.1.0
-pydantic==2.5.3
-pydash==7.0.6
-pydot==2.0.0
-typing-extensions==4.9.0
-python-ulid==2.2.0
-
-[tests]
-moto==4.2.13
-pytest-cov==4.1.0
-pytest==7.4.4
-
-[ulid]
-python-ulid
-
-[vis]
-matplotlib
-pydot