jobflow 0.1.14__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the registry.
- jobflow/_version.py +3 -3
- jobflow/core/flow.py +23 -22
- jobflow/core/job.py +12 -12
- jobflow/core/maker.py +0 -2
- jobflow/core/reference.py +22 -25
- jobflow/core/schemas.py +34 -0
- jobflow/core/state.py +0 -4
- jobflow/core/store.py +32 -28
- jobflow/managers/fireworks.py +3 -6
- jobflow/managers/local.py +18 -16
- jobflow/settings.py +0 -2
- jobflow/utils/dict_mods.py +0 -3
- jobflow/utils/enum.py +1 -4
- jobflow/utils/find.py +5 -13
- jobflow/utils/graph.py +11 -15
- jobflow/utils/log.py +0 -2
- {jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/METADATA +19 -20
- jobflow-0.1.15.dist-info/RECORD +27 -0
- {jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/WHEEL +1 -1
- jobflow-0.1.14.dist-info/RECORD +0 -26
- {jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/LICENSE +0 -0
- {jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/top_level.txt +0 -0
jobflow/_version.py
CHANGED
@@ -1,7 +1,7 @@
-from
+from importlib.metadata import PackageNotFoundError, version
 
 try:
-    __version__ =
-except
+    __version__ = version("jobflow")
+except PackageNotFoundError:  # pragma: no cover
     # package is not installed
     __version__ = ""
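The version string is now resolved from the installed distribution's metadata at import time. A minimal sketch of the same standard-library idiom:

```python
from importlib.metadata import PackageNotFoundError, version

try:
    # "jobflow" is the distribution name as registered on PyPI
    print(version("jobflow"))  # e.g. "0.1.15" once this wheel is installed
except PackageNotFoundError:
    print("jobflow is not installed")
```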
jobflow/core/flow.py
CHANGED
@@ -5,7 +5,7 @@ from __future__ import annotations
 import logging
 import warnings
 from copy import deepcopy
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING
 
 from monty.json import MSONable
 
@@ -14,13 +14,13 @@ from jobflow.core.reference import find_and_get_references
 from jobflow.utils import ValueEnum, contains_flow_or_job, suuid
 
 if TYPE_CHECKING:
-    from
+    from collections.abc import Iterator, Sequence
+    from typing import Any, Callable
 
     from networkx import DiGraph
 
     from jobflow import Job
 
-__all__ = ["JobOrder", "Flow", "get_flow"]
 
 logger = logging.getLogger(__name__)
 
@@ -166,7 +166,7 @@ class Flow(MSONable):
            raise TypeError(
                f"Flow can only contain Job or Flow objects, not {type(value).__name__}"
            )
-        jobs = list(self
+        jobs = list(self)
        jobs[idx] = value  # type: ignore[index, assignment]
        self.jobs = tuple(jobs)
 
@@ -188,10 +188,10 @@ class Flow(MSONable):
 
    def __sub__(self, other: Flow | Job) -> Flow:
        """Remove a job or subflow from the flow."""
-        if other not in self
+        if other not in self:
            raise ValueError(f"{other!r} not found in flow")
        new_flow = deepcopy(self)
-        new_flow.jobs = tuple([job for job in new_flow
+        new_flow.jobs = tuple([job for job in new_flow if job != other])
        return new_flow
 
    def __repr__(self, level: int = 0, prefix: str = "") -> str:
@@ -201,8 +201,8 @@ class Flow(MSONable):
        _prefix = f"{prefix}." if prefix else ""
        job_reprs = "\n".join(
            f"{indent}{_prefix}{i}. "
-            f"{
-            for i,
+            f"{jb.__repr__(level + 1, f'{_prefix}{i}') if isinstance(jb, Flow) else jb}"
+            for i, jb in enumerate(self, 1)
        )
        return f"Flow({name=}, {uuid=})\n{job_reprs}"
 
@@ -298,7 +298,7 @@ class Flow(MSONable):
        The uuids of all Jobs in the Flow (including nested Flows).
        """
        uuids: list[str] = []
-        for job in self
+        for job in self:
            if isinstance(job, Flow):
                uuids.extend(job.job_uuids)
            else:
@@ -316,7 +316,7 @@ class Flow(MSONable):
        The uuids of all Jobs and Flows in the Flow (including nested Flows).
        """
        uuids: list[str] = []
-        for job in self
+        for job in self:
            if isinstance(job, Flow):
                uuids.extend(job.all_uuids)
            uuids.append(job.uuid)
@@ -336,7 +336,7 @@ class Flow(MSONable):
 
        import networkx as nx
 
-        graph = nx.compose_all([job.graph for job in self
+        graph = nx.compose_all([job.graph for job in self])
 
        for node in graph:
            node_props = graph.nodes[node]
@@ -346,7 +346,7 @@ class Flow(MSONable):
        if self.order == JobOrder.LINEAR:
            # add fake edges between jobs to force linear order
            edges = []
-            for job_a, job_b in nx.utils.pairwise(self
+            for job_a, job_b in nx.utils.pairwise(self):
                if isinstance(job_a, Flow):
                    leaves = [v for v, d in job_a.graph.out_degree() if d == 0]
                else:
@@ -474,7 +474,7 @@ class Flow(MSONable):
        >>> flow.update_kwargs({"number": 10}, name_filter="add")
        >>> flow.update_kwargs({"number": 10}, function_filter=add)
        """
-        for job in self
+        for job in self:
            job.update_kwargs(
                update,
                name_filter=name_filter,
@@ -573,7 +573,7 @@ class Flow(MSONable):
        ...     {"number": 10}, class_filter=AddMaker, nested=False
        ... )
        """
-        for job in self
+        for job in self:
            job.update_maker_kwargs(
                update,
                name_filter=name_filter,
@@ -598,7 +598,7 @@ class Flow(MSONable):
        else:
            self.name += append_str
 
-        for job in self
+        for job in self:
            job.append_name(append_str, prepend=prepend)
 
    def update_metadata(
@@ -647,7 +647,7 @@ class Flow(MSONable):
 
        >>> flow.update_metadata({"tag": "addition_job"})
        """
-        for job in self
+        for job in self:
            job.update_metadata(
                update,
                name_filter=name_filter,
@@ -717,7 +717,7 @@ class Flow(MSONable):
 
        >>> flow.update_config({"manager_config": {"_fworker": "myfworker"}})
        """
-        for job in self
+        for job in self:
            job.update_config(
                config,
                name_filter=name_filter,
@@ -756,8 +756,8 @@ class Flow(MSONable):
            self.hosts.extend(hosts_uuids)
        else:
            hosts_uuids = [self.uuid]
-        for
+        for job in self:
+            job.add_hosts_uuids(hosts_uuids, prepend=prepend)
 
    def add_jobs(self, jobs: Job | Flow | Sequence[Flow | Job]) -> None:
        """
@@ -794,7 +794,8 @@ class Flow(MSONable):
                    f"current Flow ({self.uuid})"
                )
            job_ids.add(job.uuid)
-            job.
+            if job.host != self.uuid:
+                job.add_hosts_uuids(hosts)
        self._jobs += tuple(jobs)
 
    def remove_jobs(self, indices: int | list[int]):
@@ -810,12 +811,12 @@ class Flow(MSONable):
        """
        if not isinstance(indices, (list, tuple)):
            indices = [indices]
-        if any(
+        if any(idx < 0 or idx >= len(self) for idx in indices):
            raise ValueError(
                "Only indices between 0 and the number of the jobs are accepted"
            )
 
-        new_jobs = tuple(
+        new_jobs = tuple(job for idx, job in enumerate(self) if idx not in indices)
        uuids: set = set()
        for job in new_jobs:
            if isinstance(job, Flow):
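Nearly every hunk above replaces traversal of `self.jobs` with direct iteration over the `Flow` itself, and `remove_jobs` now leans on `len(self)` as well. A minimal sketch of what this sequence-like behaviour looks like from user code (assumes jobflow is installed; `add` is a hypothetical job):

```python
from jobflow import Flow, job

@job
def add(a, b):
    return a + b

first = add(1, 2)
second = add(first.output, 3)
flow = Flow([first, second], name="add flow")

# A Flow behaves like a sequence of its jobs: iteration, membership
# tests and len() all work without reaching into flow.jobs.
for jb in flow:
    print(jb.name, jb.uuid)
print(len(flow), first in flow)
```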
jobflow/core/job.py
CHANGED
@@ -13,7 +13,8 @@ from jobflow.core.reference import OnMissing, OutputReference
 from jobflow.utils.uuid import suuid
 
 if typing.TYPE_CHECKING:
-    from
+    from collections.abc import Hashable, Sequence
+    from typing import Any, Callable
 
     from networkx import DiGraph
     from pydantic import BaseModel
@@ -22,8 +23,6 @@ if typing.TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
-__all__ = ["job", "Job", "Response", "JobConfig", "store_inputs"]
-
 
 @dataclass
 class JobConfig(MSONable):
@@ -560,6 +559,7 @@ class Job(MSONable):
 
        from jobflow import CURRENT_JOB
        from jobflow.core.flow import get_flow
+        from jobflow.core.schemas import JobStoreDocument
 
        index_str = f", {self.index}" if self.index != 1 else ""
        logger.info(f"Starting job - {self.name} ({self.uuid}{index_str})")
@@ -633,15 +633,15 @@ class Job(MSONable):
        ) from err
 
        save = {k: "output" if v is True else v for k, v in self._kwargs.items()}
-        data =
+        data: JobStoreDocument = JobStoreDocument(
+            uuid=self.uuid,
+            index=self.index,
+            output=output,
+            completed_at=datetime.now().isoformat(),
+            metadata=self.metadata,
+            hosts=self.hosts,
+            name=self.name,
+        )
        store.update(data, key=["uuid", "index"], save=save)
 
        CURRENT_JOB.reset()
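With this change the completion record written at the end of `Job.run` is a validated `JobStoreDocument` rather than a hand-built dict. A sketch of constructing the same document outside a job, with hypothetical values:

```python
from datetime import datetime

from jobflow.core.schemas import JobStoreDocument

doc = JobStoreDocument(
    uuid="abc-123",  # hypothetical; real jobs generate their own UUIDs
    index=1,
    output={"sum": 3},
    completed_at=datetime.now().isoformat(),
    metadata={},
    hosts=[],
    name="add",
)
# As in the hunk above, the document is then written with:
# store.update(doc, key=["uuid", "index"], save=save)
```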
jobflow/core/maker.py
CHANGED
jobflow/core/reference.py
CHANGED
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import contextlib
 import typing
-from typing import Any
+from typing import Any
 
 from monty.json import MontyDecoder, MontyEncoder, MSONable, jsanitize
 from pydantic import BaseModel
@@ -13,15 +13,9 @@ from pydantic.v1.utils import lenient_issubclass
 from jobflow.utils.enum import ValueEnum
 
 if typing.TYPE_CHECKING:
-    import
+    from collections.abc import Sequence
 
-__all__ = [
-    "OnMissing",
-    "OutputReference",
-    "resolve_references",
-    "find_and_resolve_references",
-    "find_and_get_references",
-]
+    import jobflow
 
 
 class OnMissing(ValueEnum):
@@ -95,14 +89,14 @@ class OutputReference(MSONable):
        uuid: str,
        attributes: tuple[tuple[str, Any], ...] = (),
        output_schema: type[BaseModel] = None,
-    ):
+    ) -> None:
        super().__init__()
        self.uuid = uuid
        self.attributes = attributes
        self.output_schema = output_schema
 
        for attr_type, attr in attributes:
-            if attr_type not in
+            if attr_type not in {"a", "i"}:
                raise ValueError(
                    f"Unrecognised attribute type '{attr_type}' for attribute '{attr}'"
                )
@@ -165,11 +159,12 @@ class OutputReference(MSONable):
        if on_missing == OnMissing.ERROR and index not in cache[self.uuid]:
            istr = f" ({index})" if index is not None else ""
            raise ValueError(
-                f"Could not resolve reference - {self.uuid}{istr} not in store or
+                f"Could not resolve reference - {self.uuid}{istr} not in store or "
+                f"{index=}, {cache=}"
            )
+        if on_missing == OnMissing.NONE and index not in cache[self.uuid]:
            return None
+        if on_missing == OnMissing.PASS and index not in cache[self.uuid]:
            return self
 
        data = cache[self.uuid][index]
@@ -182,7 +177,11 @@ class OutputReference(MSONable):
 
        for attr_type, attr in self.attributes:
            # i means index else use attribute access
-            data =
+            data = (
+                data[attr]
+                if attr_type == "i" or isinstance(data, dict)
+                else getattr(data, attr)
+            )
 
        return data
 
@@ -206,12 +205,11 @@ class OutputReference(MSONable):
        if inplace:
            self.uuid = uuid
            return self
-
-        from copy import deepcopy
+        from copy import deepcopy
 
+        new_reference = deepcopy(self)
+        new_reference.uuid = uuid
+        return new_reference
 
    def __getitem__(self, item) -> OutputReference:
        """Index the reference."""
@@ -269,7 +267,7 @@ class OutputReference(MSONable):
        """Return a hash of the reference."""
        return hash(str(self))
 
-    def __eq__(self, other:
+    def __eq__(self, other: object) -> bool:
        """Test for equality against another reference."""
        if isinstance(other, OutputReference):
            return (
@@ -291,7 +289,7 @@ class OutputReference(MSONable):
        """Serialize the reference as a dict."""
        schema = self.output_schema
        schema_dict = MontyEncoder().default(schema) if schema is not None else None
-
+        return {
            "@module": self.__class__.__module__,
            "@class": type(self).__name__,
            "@version": None,
@@ -299,7 +297,6 @@ class OutputReference(MSONable):
            "attributes": self.attributes,
            "output_schema": schema_dict,
        }
-        return data
 
 
 def resolve_references(
@@ -382,7 +379,7 @@ def find_and_get_references(arg: Any) -> tuple[OutputReference, ...]:
        # if the argument is a reference then stop there
        return (arg,)
 
+    if isinstance(arg, (float, int, str, bool)):
        # argument is a primitive, we won't find a reference here
        return ()
 
@@ -438,7 +435,7 @@ def find_and_resolve_references(
        # if the argument is a reference then stop there
        return arg.resolve(store, cache=cache, on_missing=on_missing)
 
+    if isinstance(arg, (float, int, str, bool)):
        # argument is a primitive, we won't find a reference here
        return arg
 
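The `("a", ...)`/`("i", ...)` tuples validated in `__init__` record how each access is replayed during `resolve`: `"i"` indexes into the data, `"a"` uses `getattr`, and the rewritten conditional above always indexes when the resolved data is a dict. A small sketch of how those attribute records accumulate:

```python
from jobflow.core.reference import OutputReference

ref = OutputReference("abc-123")  # hypothetical job UUID
nested = ref["results"][0]        # each item access appends an ("i", ...) record
print(nested.attributes)          # (('i', 'results'), ('i', 0))
print(nested.uuid)                # 'abc-123'
```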
jobflow/core/schemas.py
ADDED
@@ -0,0 +1,34 @@
+"""A Pydantic model for Jobstore document."""
+
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+
+class JobStoreDocument(BaseModel):
+    """A Pydantic model for Jobstore document."""
+
+    uuid: str = Field(
+        None, description="An unique identifier for the job. Generated automatically."
+    )
+    index: int = Field(
+        None,
+        description="The index of the job (number of times the job has been replaced).",
+    )
+    output: Any = Field(
+        None,
+        description="This is a reference to the future job output.",
+    )
+    completed_at: str = Field(None, description="The time the job was completed.")
+    metadata: dict = Field(
+        None,
+        description="Metadata information supplied by the user.",
+    )
+    hosts: list[str] = Field(
+        None,
+        description="The list of UUIDs of the hosts containing the job.",
+    )
+    name: str = Field(
+        None,
+        description="The name of the job.",
+    )
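Every field declares `None` as its default, so a partially populated document still validates, and the model round-trips through the standard Pydantic v2 API. A quick sketch with hypothetical values:

```python
from jobflow.core.schemas import JobStoreDocument

doc = JobStoreDocument(uuid="abc-123", index=1, name="add")
print(doc.model_dump())
# {'uuid': 'abc-123', 'index': 1, 'output': None, 'completed_at': None,
#  'metadata': None, 'hosts': None, 'name': 'add'}
```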
jobflow/core/state.py
CHANGED
jobflow/core/store.py
CHANGED
@@ -11,17 +11,19 @@ from jobflow.core.reference import OnMissing
 from jobflow.utils.find import get_root_locations
 
 if typing.TYPE_CHECKING:
+    from collections.abc import Iterator
     from enum import Enum
     from pathlib import Path
-    from typing import Any,
+    from typing import Any, Optional, Union
 
     from maggma.core import Sort
 
+    from jobflow.core.schemas import JobStoreDocument
+
+    obj_type = Union[str, Enum, type[MSONable], list[Union[Enum, str, type[MSONable]]]]
+    save_type = Optional[dict[str, obj_type]]
+    load_type = Union[bool, dict[str, Union[bool, obj_type]]]
 
-__all__ = ["JobStore"]
 
 T = typing.TypeVar("T", bound="JobStore")
 
@@ -249,12 +251,11 @@ class JobStore(Store):
        docs = self.query(
            criteria=criteria, properties=properties, load=load, sort=sort, limit=1
        )
-        return d
+        return next(docs, None)
 
    def update(
        self,
-        docs: list[dict] | dict,
+        docs: list[dict] | dict | JobStoreDocument | list[JobStoreDocument],
        key: list | str = None,
        save: bool | save_type = None,
    ):
@@ -264,7 +265,7 @@ class JobStore(Store):
        Parameters
        ----------
        docs
-            The document or list of documents to update.
+            The Pydantic document or list of Pydantic documents to update.
        key
            Field name(s) to determine uniqueness for a document, can be a list of
            multiple fields, a single field, or None if the Store's key field is to
@@ -495,7 +496,7 @@ class JobStore(Store):
        # this could be fixed but will require more complicated logic just to
        # catch a very unlikely event.
 
-        if isinstance(which, int) or which in
+        if isinstance(which, int) or which in {"last", "first"}:
            sort = -1 if which == "last" else 1
 
            criteria: dict[str, Any] = {"uuid": uuid}
@@ -521,28 +522,27 @@ class JobStore(Store):
            return find_and_resolve_references(
                result["output"], self, cache=cache, on_missing=on_missing
            )
-            load=load,
-        )
+        results = list(
+            self.query(
+                criteria={"uuid": uuid},
+                properties=["output"],
+                sort={"index": 1},
+                load=load,
            )
+        )
 
+        if len(results) == 0:
+            raise ValueError(f"UUID: {uuid} has no outputs.")
 
+        results = [r["output"] for r in results]
 
+        refs = find_and_get_references(results)
+        if any(ref.uuid == uuid for ref in refs):
+            raise RuntimeError("Reference cycle detected - aborting.")
 
+        return find_and_resolve_references(
+            results, self, cache=cache, on_missing=on_missing
+        )
 
    @classmethod
    def from_file(cls: type[T], db_file: str | Path, **kwargs) -> T:
@@ -661,6 +661,10 @@ class JobStore(Store):
 
        all_stores = {s.__name__: s for s in all_subclasses(maggma.stores.Store)}
 
+        # add ssh tunnel support
+        tunnel = maggma.stores.ssh_tunnel.SSHTunnel
+        all_stores[tunnel.__name__] = tunnel
+
        docs_store_info = spec["docs_store"]
        docs_store = _construct_store(docs_store_info, all_stores)
 
@@ -760,7 +764,7 @@ def _filter_blobs(
 
    new_blobs = []
    new_locations = []
-    for
+    for store_load in load.values():
        for blob, location in zip(blob_infos, locations):
            if store_load is True:
                new_blobs.append(blob)
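With the widened `docs` annotation, `update` accepts the new model directly, and the rewritten `query_one` returns the first match or `None` via `next(docs, None)`. A hedged sketch against the default in-memory store (assumes no custom jobflow configuration):

```python
from jobflow import SETTINGS
from jobflow.core.schemas import JobStoreDocument

store = SETTINGS.JOB_STORE  # a MemoryStore-backed JobStore by default
store.connect()
store.update(JobStoreDocument(uuid="abc-123", index=1, output=42, name="demo"))

print(store.query_one({"uuid": "abc-123"})["output"])  # 42
print(store.query_one({"uuid": "missing"}))            # None, not StopIteration
```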
jobflow/managers/fireworks.py
CHANGED
@@ -7,11 +7,10 @@ import typing
 from fireworks import FiretaskBase, Firework, FWAction, Workflow, explicit_serialize
 
 if typing.TYPE_CHECKING:
-    from
+    from collections.abc import Sequence
 
     import jobflow
-
-__all__ = ["flow_to_workflow", "job_to_firework", "JobFiretask"]
+    from jobflow.core.job import Job
 
 
 def flow_to_workflow(
@@ -148,7 +147,6 @@ class JobFiretask(FiretaskBase):
    def run_task(self, fw_spec):
        """Run the job and handle any dynamic firework submissions."""
        from jobflow import SETTINGS, initialize_logger
-        from jobflow.core.job import Job
 
        job: Job = self.get("job")
        store = self.get("store")
@@ -192,11 +190,10 @@ class JobFiretask(FiretaskBase):
        else:
            detours = [detour_wf]
 
-
+        return FWAction(
            stored_data=response.stored_data,
            detours=detours,
            additions=additions,
            defuse_workflow=response.stop_jobflow,
            defuse_children=response.stop_children,
        )
-        return fwa
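The `Job` import that `run_task` previously performed locally is now a module-level, type-checking-only import; the public entry point is unchanged. A sketch of converting a flow (submission is left commented out since it needs a configured LaunchPad):

```python
from jobflow import Flow, job
from jobflow.managers.fireworks import flow_to_workflow

@job
def add(a, b):
    return a + b

flow = Flow([add(1, 2)])
wf = flow_to_workflow(flow)  # one Firework per job, dependencies preserved
# from fireworks import LaunchPad
# LaunchPad.auto_load().add_wf(wf)
```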
jobflow/managers/local.py
CHANGED
@@ -6,12 +6,10 @@ import logging
 import typing
 
 if typing.TYPE_CHECKING:
+    from pathlib import Path
 
     import jobflow
 
-__all__ = ["run_locally"]
-
 logger = logging.getLogger(__name__)
 
 
@@ -20,6 +18,7 @@ def run_locally(
    log: bool = True,
    store: jobflow.JobStore = None,
    create_folders: bool = False,
+    root_dir: str | Path | None = None,
    ensure_success: bool = False,
    allow_external_references: bool = False,
 ) -> dict[str, dict[int, jobflow.Response]]:
@@ -28,25 +27,29 @@ def run_locally(
 
    Parameters
    ----------
-    flow
+    flow : Flow | Job | list[Job]
        A job or flow.
-    log
+    log : bool
        Whether to print log messages.
-    store
+    store : JobStore
        A job store. If a job store is not specified then
        :obj:`JobflowSettings.JOB_STORE` will be used. By default this is a maggma
        ``MemoryStore`` but can be customised by setting the jobflow configuration file.
-    create_folders
+    create_folders : bool
        Whether to run each job in a new folder.
+    root_dir : str | Path | None
+        The root directory to run the jobs in or where to create new subfolders if
+        ``create_folders`` is True. If None then the current working
+        directory will be used.
+    ensure_success : bool
        Raise an error if the flow was not executed successfully.
-    allow_external_references
+    allow_external_references : bool
        If False all the references to other outputs should be from other Jobs
        of the Flow.
 
    Returns
    -------
+    dict[str, dict[int, Response]]
        The responses of the jobs, as a dict of ``{uuid: {index: response}}``.
    """
    from collections import defaultdict
@@ -63,6 +66,9 @@ def run_locally(
    if store is None:
        store = SETTINGS.JOB_STORE
 
+    root_dir = Path.cwd() if root_dir is None else Path(root_dir).resolve()
+    root_dir.mkdir(exist_ok=True)
+
    store.connect()
 
    if log:
@@ -75,8 +81,6 @@ def run_locally(
    responses: dict[str, dict[int, jobflow.Response]] = defaultdict(dict)
    stop_jobflow = False
 
-    root_dir = Path.cwd()
-
    def _run_job(job: jobflow.Job, parents):
        nonlocal stop_jobflow
 
@@ -134,8 +138,7 @@ def run_locally(
 
        if not all(diversion_responses):
            return None, False
-
-        return response, False
+        return response, False
 
    def _get_job_dir():
        if create_folders:
@@ -143,8 +146,7 @@ def run_locally(
            job_dir = root_dir / f"job_{time_now}-{randint(10000, 99999)}"
            job_dir.mkdir()
            return job_dir
-
-        return root_dir
+        return root_dir
 
    def _run(root_flow):
        encountered_bad_response = False
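A hedged sketch of the new `root_dir` parameter; `"runs"` is an arbitrary directory name, created on demand per the `mkdir(exist_ok=True)` call above:

```python
from jobflow import Flow, job
from jobflow.managers.local import run_locally

@job
def add(a, b):
    return a + b

responses = run_locally(
    Flow([add(1, 2)]),
    create_folders=True,  # one fresh job_* subfolder per job...
    root_dir="runs",      # ...created under ./runs rather than the CWD
    ensure_success=True,
)
```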
jobflow/settings.py
CHANGED
jobflow/utils/dict_mods.py
CHANGED
jobflow/utils/enum.py
CHANGED
@@ -2,8 +2,6 @@
 
 from enum import Enum
 
-__all__ = ["ValueEnum"]
-
 
 class ValueEnum(Enum):
     """Enum that serializes to string as the value and can be compared against a str."""
@@ -16,8 +14,7 @@ class ValueEnum(Enum):
        """Compare to another enum for equality."""
        if type(self) == type(other) and self.value == other.value:
            return True
-
-        return str(self.value) == str(other)
+        return str(self.value) == str(other)
 
    def __hash__(self):
        """Get a hash of the enum."""
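The collapsed `__eq__` keeps both comparison paths; a quick sketch with a hypothetical enum:

```python
from jobflow.utils.enum import ValueEnum

class Color(ValueEnum):
    RED = "red"

assert Color.RED == Color.RED   # same-type, same-value branch
assert Color.RED == "red"       # falls through to the string comparison
assert str(Color.RED) == "red"  # serializes as the value, not "Color.RED"
```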
jobflow/utils/find.py
CHANGED
@@ -5,17 +5,11 @@ from __future__ import annotations
 import typing
 
 if typing.TYPE_CHECKING:
-    from
+    from collections.abc import Hashable
+    from typing import Any
 
     from monty.json import MSONable
 
-__all__ = [
-    "find_key",
-    "find_key_value",
-    "update_in_dictionary",
-    "contains_flow_or_job",
-]
-
 
 def find_key(
    d: dict[Hashable, Any] | list[Any],
@@ -78,10 +72,8 @@ def find_key(
        if (
            inspect.isclass(key)
            and issubclass(key, MSONable)
-            and "@module"
-            and obj
-            and "@class" in obj
-            and obj["@class"] == key.__name__
+            and obj.get("@module") == key.__module__
+            and obj.get("@class") == key.__name__
        ):
            found_items.add(path)
            found = True
@@ -209,7 +201,7 @@ def contains_flow_or_job(obj: Any) -> bool:
        # if the argument is an flow or job then stop there
        return True
 
+    if isinstance(obj, (float, int, str, bool)):
        # argument is a primitive, we won't find an flow or job here
        return False
 
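`find_key` walks nested dicts and lists and returns the location of every match; per the corrected check above, an MSONable class used as the key must now match on both `@module` and `@class`. A sketch with a plain string key:

```python
from jobflow.utils.find import find_key

nested = {"a": [{"b": 1}], "c": {"b": 2}}
# One location per occurrence of "b"; ordering is not guaranteed.
print(find_key(nested, "b"))  # e.g. [('a', 0), ('c',)]
```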
jobflow/utils/graph.py
CHANGED
@@ -8,19 +8,15 @@ import networkx as nx
 from monty.dev import requires
 
 try:
-    import matplotlib
+    import matplotlib as mpl
 except ImportError:
+    mpl = None
 
 import typing
 
 if typing.TYPE_CHECKING:
-    pass
-
     import jobflow
 
-__all__ = ["itergraph", "draw_graph", "to_pydot", "to_mermaid"]
-
 
 def itergraph(graph: nx.DiGraph):
     """
@@ -56,7 +52,7 @@ def itergraph(graph: nx.DiGraph):
        yield from nx.topological_sort(subgraph)
 
 
-@requires(
+@requires(mpl, "matplotlib must be installed to plot flow graphs.")
 def draw_graph(
    graph: nx.DiGraph,
    layout_function: typing.Callable = None,
@@ -155,20 +151,20 @@ def to_pydot(flow: jobflow.Flow):
    nx_graph = flow.graph
    pydot_graph = pydot.Dot(f'"{flow.name}"', graph_type="digraph")
 
-    for n,
-        p = pydot.Node(str(n), **
+    for n, node_data in nx_graph.nodes(data=True):
+        str_node_data = {k: str(v) for k, v in node_data.items()}
+        p = pydot.Node(str(n), **str_node_data)
        pydot_graph.add_node(p)
 
-    for u, v,
-        edge = pydot.Edge(str(u), str(v), label=
+    for u, v, edge_data in nx_graph.edges(data=True):
+        str_edge_data = {k: str(v) for k, v in edge_data.items()}
+        edge = pydot.Edge(str(u), str(v), label=str_edge_data["properties"])
        pydot_graph.add_edge(edge)
 
    def add_cluster(nested_flow, outer_graph):
        cluster = pydot.Cluster(nested_flow.uuid)
        cluster.set_label(nested_flow.name)
-        for job in nested_flow
+        for job in nested_flow:
            if isinstance(job, Flow):
                add_cluster(job, cluster)
            else:
@@ -236,7 +232,7 @@ def to_mermaid(flow: jobflow.Flow | jobflow.Job, show_flow_boxes: bool = False)
    def add_subgraph(nested_flow, indent_level=1):
        prefix = " " * indent_level
 
-        for job in nested_flow
+        for job in nested_flow:
            if isinstance(job, Flow):
                if show_flow_boxes:
                    lines.append(f"{prefix}subgraph {job.uuid} [{job.name}]")
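Both renderers now traverse nested flows by iterating the `Flow` objects directly. A sketch of the mermaid output for a two-job flow:

```python
from jobflow import Flow, job
from jobflow.utils.graph import to_mermaid

@job
def add(a, b):
    return a + b

first = add(1, 2)
flow = Flow([first, add(first.output, 3)], name="add flow")
print(to_mermaid(flow, show_flow_boxes=True))  # mermaid flowchart source
```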
jobflow/utils/log.py
CHANGED
{jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jobflow
-Version: 0.1.14
+Version: 0.1.15
 Summary: jobflow is a library for writing computational workflows
 Author-email: Alex Ganose <alexganose@gmail.com>
 License: modified BSD
@@ -15,29 +15,28 @@ Classifier: Intended Audience :: Science/Research
 Classifier: Intended Audience :: System Administrators
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Topic :: Database :: Front-Ends
 Classifier: Topic :: Other/Nonlisted Topic
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: PyYAML
 Requires-Dist: maggma >=0.57.0
 Requires-Dist: monty >=2023.9.25
 Requires-Dist: networkx
-Requires-Dist: pydantic >=2.0.1
 Requires-Dist: pydantic-settings >=2.0.3
+Requires-Dist: pydantic >=2.0.1
 Requires-Dist: pydash
 Provides-Extra: dev
 Requires-Dist: pre-commit >=2.12.1 ; extra == 'dev'
 Provides-Extra: docs
 Requires-Dist: autodoc-pydantic ==2.0.1 ; extra == 'docs'
 Requires-Dist: furo ==2023.9.10 ; extra == 'docs'
-Requires-Dist: ipython ==8.
+Requires-Dist: ipython ==8.18.1 ; extra == 'docs'
 Requires-Dist: myst-parser ==2.0.0 ; extra == 'docs'
 Requires-Dist: nbsphinx ==0.9.3 ; extra == 'docs'
 Requires-Dist: sphinx-copybutton ==0.5.2 ; extra == 'docs'
@@ -47,30 +46,30 @@ Requires-Dist: FireWorks ; extra == 'fireworks'
 Provides-Extra: strict
 Requires-Dist: FireWorks ==2.0.3 ; extra == 'strict'
 Requires-Dist: PyYAML ==6.0.1 ; extra == 'strict'
-Requires-Dist: maggma ==0.
-Requires-Dist: matplotlib ==3.
-Requires-Dist: monty ==2023.
-Requires-Dist: moto ==4.2.
-Requires-Dist: networkx ==3.1 ; extra == 'strict'
-Requires-Dist: pydantic ==2.
-Requires-Dist: pydantic
+Requires-Dist: maggma ==0.58.0 ; extra == 'strict'
+Requires-Dist: matplotlib ==3.8.2 ; extra == 'strict'
+Requires-Dist: monty ==2023.11.3 ; extra == 'strict'
+Requires-Dist: moto ==4.2.11 ; extra == 'strict'
+Requires-Dist: networkx ==3.2.1 ; extra == 'strict'
+Requires-Dist: pydantic-settings ==2.1.0 ; extra == 'strict'
+Requires-Dist: pydantic ==2.5.2 ; extra == 'strict'
 Requires-Dist: pydash ==7.0.6 ; extra == 'strict'
 Requires-Dist: pydot ==1.4.2 ; extra == 'strict'
 Requires-Dist: typing-extensions ==4.8.0 ; extra == 'strict'
 Provides-Extra: tests
+Requires-Dist: moto ==4.2.11 ; extra == 'tests'
 Requires-Dist: pytest-cov ==4.1.0 ; extra == 'tests'
-Requires-Dist: pytest ==7.4.
-Requires-Dist: moto ==4.2.4 ; extra == 'tests'
+Requires-Dist: pytest ==7.4.3 ; extra == 'tests'
 Provides-Extra: vis
 Requires-Dist: matplotlib ; extra == 'vis'
 Requires-Dist: pydot ; extra == 'vis'
 
 # jobflow
 
+[](https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting)
+[](https://codecov.io/gh/materialsproject/jobflow/)
+[](https://pypi.org/project/jobflow/)
+
 
 [Documentation](https://materialsproject.github.io/jobflow/) | [PyPI](https://pypi.org/project/jobflow/) | [GitHub](https://github.com/materialsproject/jobflow)
 
@@ -143,7 +142,7 @@ the jobs is determined automatically and can be visualised using the flow graph.
 
 ## Installation
 
+`jobflow` is a Python 3.9+ library and can be installed using `pip`.
 
 ```bash
 pip install jobflow

jobflow-0.1.15.dist-info/RECORD
ADDED
@@ -0,0 +1,27 @@
+jobflow/__init__.py,sha256=l7o10BaqEQWw5aZziWRg40PsIAgQ4lrlluXs9hIv2mg,570
+jobflow/_version.py,sha256=Ym07PBD7sAmpqVpX8tuzWma3P_Hv6KXbDKXWkw8OwaI,205
+jobflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jobflow/settings.py,sha256=e_8QwC05MUkDRenAutnqg8eAy7pQ-firtmCob7BKA3g,5474
+jobflow/core/__init__.py,sha256=3sx5t1Gysejc4c_fPrhvCjPUg0p_384Zko8ms2c_NnY,98
+jobflow/core/flow.py,sha256=c_rG9lzOQ00ckzN40gDHLm5CCvFlAeVE75kKLvvdJ2U,28879
+jobflow/core/job.py,sha256=bPZMcmiW-Qqs-qNnlypbwZCgS_IN9kM0sybYw3koXOw,46466
+jobflow/core/maker.py,sha256=DKfFXe91v9rRFtgUrm8FMiEcLArmb6jnK7nysuDdZts,11185
+jobflow/core/reference.py,sha256=x6RXt-yxbQHoea2gb5SR4GWBVD1U2LaNwPxUGPdvGZo,16252
+jobflow/core/schemas.py,sha256=Oi5-PnZpI8S9jSY7Q4f8H7xUybbRZDXlgugeVewVsrA,968
+jobflow/core/state.py,sha256=IGJTtmpotDKEcgDEnsT5x20ZeyvQT68Mr3teTjkgYnM,709
+jobflow/core/store.py,sha256=Bdm92-NqCPpID183DV9F9pMnG9PQLMpFOKUJkbzccNo,26982
+jobflow/managers/__init__.py,sha256=KkA5cVDe2os2_2aTa8eiB9SnkGLZNybcci-Lo4tbaWM,55
+jobflow/managers/fireworks.py,sha256=5IKDkE-dppvbhDWTfJKCMmqvxg50zBgCqm6qUqsVZtc,6654
+jobflow/managers/local.py,sha256=J2GHodXMqryME0EfccJTGgbjXuuwUmtrVA9RnCkDcy8,5355
+jobflow/utils/__init__.py,sha256=meuvfuk05U594rx4YB6BoBnoQxBMjCA2hKX3TSfZsB8,328
+jobflow/utils/dict_mods.py,sha256=g50aMw-mK3RjXp_hHJBR9xUaWRYXoqqmPTMCPDDluz4,6052
+jobflow/utils/enum.py,sha256=rFDdqQr-844Vzj9G9vuzPTWAJG60pQVNzgjyugkNwtc,713
+jobflow/utils/find.py,sha256=Qaxh0TxQtXznK4Wy72klLFfFB3NUfUI4TF6-NEAzxcU,6162
+jobflow/utils/graph.py,sha256=CNZNlUPmkVKio7m9Y3nifM9BbXe8vtvFHuXRwJu84R0,6562
+jobflow/utils/log.py,sha256=tIMpsI4JTlkpxjBZfWqZ0qkEkIxk1-RBasz8JhDcF7E,692
+jobflow/utils/uuid.py,sha256=lVgo8e8gUB7HLSR0H_9uZH-OPkVBaOT39atAnNKYAaI,268
+jobflow-0.1.15.dist-info/LICENSE,sha256=jUEiENfZNQZh9RE9ixtUWgVkLRD85ScZ6iv1WREf19w,2418
+jobflow-0.1.15.dist-info/METADATA,sha256=hFAXrVdF_OJfoi33zxO1DeIbMT9Ky-DJpy5xsES0fMY,9050
+jobflow-0.1.15.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+jobflow-0.1.15.dist-info/top_level.txt,sha256=IanNooU88OupQPDrWnT0rbL3E27P2wEy7Jsfx9_j8zc,8
+jobflow-0.1.15.dist-info/RECORD,,
jobflow-0.1.14.dist-info/RECORD
DELETED
@@ -1,26 +0,0 @@
-jobflow/__init__.py,sha256=l7o10BaqEQWw5aZziWRg40PsIAgQ4lrlluXs9hIv2mg,570
-jobflow/_version.py,sha256=ERuIgO0lzbMrVMSXNAE55GxFc5JHDdf1CW7GtoXJ67Q,206
-jobflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jobflow/settings.py,sha256=tixD1_4Se4Lo5-0vJ71eYVPCZ_KaJPAfcYd7ESSKMqs,5505
-jobflow/core/__init__.py,sha256=3sx5t1Gysejc4c_fPrhvCjPUg0p_384Zko8ms2c_NnY,98
-jobflow/core/flow.py,sha256=LqIzSFa3NuLCJLIj7LJeF4WaeR5fya0w24v3fGsR6IM,28907
-jobflow/core/job.py,sha256=GRJyCfusF4G5FvpDRnrNv20m4L2iD9YdKRNr5_VLkw8,46431
-jobflow/core/maker.py,sha256=cdEH7JE-LOyenibj_6y6N0y1bFe0_xbphTz8toigzzI,11206
-jobflow/core/reference.py,sha256=3JIDQ6CSNZFBDWK4QTQRjqSqNRQLL9A9UVq0_SR-62c,16291
-jobflow/core/state.py,sha256=qhmrxZ-dWFQV1QZH-FcMAEajYl-Zxue5x8d8xQQO0r0,746
-jobflow/core/store.py,sha256=6pveNBfiBYN6Zpp-PM3GfNWWzH187aVfJ6pyrbQ0j-M,26852
-jobflow/managers/__init__.py,sha256=KkA5cVDe2os2_2aTa8eiB9SnkGLZNybcci-Lo4tbaWM,55
-jobflow/managers/fireworks.py,sha256=VEXsu2bghScl7fH7LQ-FbZOFLeFRCFRu_SRusmmHZ9A,6733
-jobflow/managers/local.py,sha256=LliaffHhDPEs6dS9TxStNxv-xrJ2bGNrOIq-Mermouo,4985
-jobflow/utils/__init__.py,sha256=meuvfuk05U594rx4YB6BoBnoQxBMjCA2hKX3TSfZsB8,328
-jobflow/utils/dict_mods.py,sha256=bJmHB-JHtwBwEpWroO1YaOac_AvCItoQ6GBWlWdDMtk,6090
-jobflow/utils/enum.py,sha256=mt8z0D2Nu2oQxZ1vXi6VTqorOMseRptSCjsgMNSvvOE,756
-jobflow/utils/find.py,sha256=dGnXKJ_VDmWKxMBtJo_Dnb9jd3JVcPu7HasZsd6-jmI,6307
-jobflow/utils/graph.py,sha256=61k0bA_MCqlkVtJ2ldwpH6Tx2cwXrE_NWveouh6zwTk,6646
-jobflow/utils/log.py,sha256=dNIOvhApCtW7z1OamLlUmg6TL4mDXOQ0yFQxD8AM0Lk,725
-jobflow/utils/uuid.py,sha256=lVgo8e8gUB7HLSR0H_9uZH-OPkVBaOT39atAnNKYAaI,268
-jobflow-0.1.14.dist-info/LICENSE,sha256=jUEiENfZNQZh9RE9ixtUWgVkLRD85ScZ6iv1WREf19w,2418
-jobflow-0.1.14.dist-info/METADATA,sha256=uFcQaCJN3BmmVk1J3Ls2clWCpH9ksnpqxe2mms6siZs,9192
-jobflow-0.1.14.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
-jobflow-0.1.14.dist-info/top_level.txt,sha256=IanNooU88OupQPDrWnT0rbL3E27P2wEy7Jsfx9_j8zc,8
-jobflow-0.1.14.dist-info/RECORD,,

{jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/LICENSE
File without changes

{jobflow-0.1.14.dist-info → jobflow-0.1.15.dist-info}/top_level.txt
File without changes