jobflow 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
jobflow/_version.py CHANGED
@@ -1,7 +1,7 @@
-from pkg_resources import DistributionNotFound, get_distribution
+from importlib.metadata import PackageNotFoundError, version
 
 try:
-    __version__ = get_distribution("jobflow").version
-except DistributionNotFound:
+    __version__ = version("jobflow")
+except PackageNotFoundError:  # pragma: no cover
     # package is not installed
     __version__ = ""
jobflow/core/flow.py CHANGED
@@ -5,7 +5,7 @@ from __future__ import annotations
 import logging
 import warnings
 from copy import deepcopy
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from monty.json import MSONable
 
@@ -14,13 +14,13 @@ from jobflow.core.reference import find_and_get_references
 from jobflow.utils import ValueEnum, contains_flow_or_job, suuid
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Iterator
+    from collections.abc import Iterator, Sequence
+    from typing import Any, Callable
 
     from networkx import DiGraph
 
     from jobflow import Job
 
-__all__ = ["JobOrder", "Flow", "get_flow"]
 
 logger = logging.getLogger(__name__)
 
@@ -166,7 +166,7 @@ class Flow(MSONable):
             raise TypeError(
                 f"Flow can only contain Job or Flow objects, not {type(value).__name__}"
             )
-        jobs = list(self.jobs)
+        jobs = list(self)
         jobs[idx] = value  # type: ignore[index, assignment]
         self.jobs = tuple(jobs)
 
@@ -188,10 +188,10 @@ class Flow(MSONable):
 
     def __sub__(self, other: Flow | Job) -> Flow:
         """Remove a job or subflow from the flow."""
-        if other not in self.jobs:
+        if other not in self:
             raise ValueError(f"{other!r} not found in flow")
         new_flow = deepcopy(self)
-        new_flow.jobs = tuple([job for job in new_flow.jobs if job != other])
+        new_flow.jobs = tuple([job for job in new_flow if job != other])
         return new_flow
 
     def __repr__(self, level: int = 0, prefix: str = "") -> str:
@@ -201,8 +201,8 @@ class Flow(MSONable):
         _prefix = f"{prefix}." if prefix else ""
         job_reprs = "\n".join(
             f"{indent}{_prefix}{i}. "
-            f"{j.__repr__(level + 1, f'{_prefix}{i}') if isinstance(j, Flow) else j}"
-            for i, j in enumerate(self.jobs, 1)
+            f"{jb.__repr__(level + 1, f'{_prefix}{i}') if isinstance(jb, Flow) else jb}"
+            for i, jb in enumerate(self, 1)
         )
         return f"Flow({name=}, {uuid=})\n{job_reprs}"
 
@@ -298,7 +298,7 @@ class Flow(MSONable):
         The uuids of all Jobs in the Flow (including nested Flows).
         """
         uuids: list[str] = []
-        for job in self.jobs:
+        for job in self:
             if isinstance(job, Flow):
                 uuids.extend(job.job_uuids)
             else:
@@ -316,7 +316,7 @@ class Flow(MSONable):
         The uuids of all Jobs and Flows in the Flow (including nested Flows).
         """
         uuids: list[str] = []
-        for job in self.jobs:
+        for job in self:
             if isinstance(job, Flow):
                 uuids.extend(job.all_uuids)
                 uuids.append(job.uuid)
@@ -336,7 +336,7 @@ class Flow(MSONable):
 
         import networkx as nx
 
-        graph = nx.compose_all([job.graph for job in self.jobs])
+        graph = nx.compose_all([job.graph for job in self])
 
         for node in graph:
             node_props = graph.nodes[node]
@@ -346,7 +346,7 @@ class Flow(MSONable):
         if self.order == JobOrder.LINEAR:
             # add fake edges between jobs to force linear order
             edges = []
-            for job_a, job_b in nx.utils.pairwise(self.jobs):
+            for job_a, job_b in nx.utils.pairwise(self):
                 if isinstance(job_a, Flow):
                     leaves = [v for v, d in job_a.graph.out_degree() if d == 0]
                 else:
@@ -474,7 +474,7 @@ class Flow(MSONable):
         >>> flow.update_kwargs({"number": 10}, name_filter="add")
         >>> flow.update_kwargs({"number": 10}, function_filter=add)
         """
-        for job in self.jobs:
+        for job in self:
             job.update_kwargs(
                 update,
                 name_filter=name_filter,
@@ -573,7 +573,7 @@ class Flow(MSONable):
         ...     {"number": 10}, class_filter=AddMaker, nested=False
         ... )
         """
-        for job in self.jobs:
+        for job in self:
             job.update_maker_kwargs(
                 update,
                 name_filter=name_filter,
@@ -598,7 +598,7 @@ class Flow(MSONable):
         else:
             self.name += append_str
 
-        for job in self.jobs:
+        for job in self:
             job.append_name(append_str, prepend=prepend)
 
     def update_metadata(
@@ -647,7 +647,7 @@ class Flow(MSONable):
 
         >>> flow.update_metadata({"tag": "addition_job"})
         """
-        for job in self.jobs:
+        for job in self:
             job.update_metadata(
                 update,
                 name_filter=name_filter,
@@ -717,7 +717,7 @@ class Flow(MSONable):
 
         >>> flow.update_config({"manager_config": {"_fworker": "myfworker"}})
         """
-        for job in self.jobs:
+        for job in self:
             job.update_config(
                 config,
                 name_filter=name_filter,
@@ -756,8 +756,8 @@ class Flow(MSONable):
             self.hosts.extend(hosts_uuids)
         else:
             hosts_uuids = [self.uuid]
-        for j in self.jobs:
-            j.add_hosts_uuids(hosts_uuids, prepend=prepend)
+        for job in self:
+            job.add_hosts_uuids(hosts_uuids, prepend=prepend)
 
     def add_jobs(self, jobs: Job | Flow | Sequence[Flow | Job]) -> None:
         """
@@ -794,7 +794,8 @@ class Flow(MSONable):
                     f"current Flow ({self.uuid})"
                 )
             job_ids.add(job.uuid)
-            job.add_hosts_uuids(hosts)
+            if job.host != self.uuid:
+                job.add_hosts_uuids(hosts)
         self._jobs += tuple(jobs)
 
     def remove_jobs(self, indices: int | list[int]):
@@ -810,12 +811,12 @@ class Flow(MSONable):
         """
        if not isinstance(indices, (list, tuple)):
            indices = [indices]
-        if any(i < 0 or i >= len(self.jobs) for i in indices):
+        if any(idx < 0 or idx >= len(self) for idx in indices):
            raise ValueError(
                "Only indices between 0 and the number of the jobs are accepted"
            )
 
-        new_jobs = tuple(j for i, j in enumerate(self.jobs) if i not in indices)
+        new_jobs = tuple(job for idx, job in enumerate(self) if idx not in indices)
         uuids: set = set()
         for job in new_jobs:
             if isinstance(job, Flow):
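Most edits in this file swap `self.jobs` for `self`, leaning on `Flow` behaving as a sequence of its jobs; the diff itself uses `len(self)`, `enumerate(self, 1)`, and `other not in self`. A short sketch of that sequence-style access (the `add` job is illustrative, not part of the diff):

```python
# Assumes jobflow is installed; `add` is a toy job for illustration only.
from jobflow import Flow, job


@job
def add(a, b):
    return a + b


flow = Flow([add(1, 2), add(3, 4)])
print(len(flow))  # number of jobs/subflows, as used by remove_jobs above
for jb in flow:   # iterates jobs directly, as in `for job in self`
    print(jb.name)
```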
jobflow/core/job.py CHANGED
@@ -13,7 +13,8 @@ from jobflow.core.reference import OnMissing, OutputReference
 from jobflow.utils.uuid import suuid
 
 if typing.TYPE_CHECKING:
-    from typing import Any, Callable, Hashable, Sequence
+    from collections.abc import Hashable, Sequence
+    from typing import Any, Callable
 
     from networkx import DiGraph
     from pydantic import BaseModel
@@ -22,8 +23,6 @@ if typing.TYPE_CHECKING:
 
 logger = logging.getLogger(__name__)
 
-__all__ = ["job", "Job", "Response", "JobConfig", "store_inputs"]
-
 
 @dataclass
 class JobConfig(MSONable):
@@ -560,6 +559,7 @@ class Job(MSONable):
 
         from jobflow import CURRENT_JOB
         from jobflow.core.flow import get_flow
+        from jobflow.core.schemas import JobStoreDocument
 
         index_str = f", {self.index}" if self.index != 1 else ""
         logger.info(f"Starting job - {self.name} ({self.uuid}{index_str})")
@@ -633,15 +633,15 @@
             ) from err
 
         save = {k: "output" if v is True else v for k, v in self._kwargs.items()}
-        data = {
-            "uuid": self.uuid,
-            "index": self.index,
-            "output": output,
-            "completed_at": datetime.now().isoformat(),
-            "metadata": self.metadata,
-            "hosts": self.hosts,
-            "name": self.name,
-        }
+        data: JobStoreDocument = JobStoreDocument(
+            uuid=self.uuid,
+            index=self.index,
+            output=output,
+            completed_at=datetime.now().isoformat(),
+            metadata=self.metadata,
+            hosts=self.hosts,
+            name=self.name,
+        )
         store.update(data, key=["uuid", "index"], save=save)
 
         CURRENT_JOB.reset()
@@ -1321,6 +1321,7 @@ def prepare_replace(
         store_output_job.index = current_job.index + 1
         store_output_job.metadata = current_job.metadata
         store_output_job.output_schema = current_job.output_schema
+        store_output_job._kwargs = current_job._kwargs
         replace.add_jobs(store_output_job)
 
     elif isinstance(replace, Job):
jobflow/core/maker.py CHANGED
@@ -12,8 +12,6 @@ if typing.TYPE_CHECKING:
 
     import jobflow
 
-__all__ = ["Maker"]
-
 
 @dataclass
 class Maker(MSONable):
jobflow/core/reference.py CHANGED
@@ -4,24 +4,18 @@ from __future__ import annotations
 
 import contextlib
 import typing
-from typing import Any, Sequence
+from typing import Any
 
 from monty.json import MontyDecoder, MontyEncoder, MSONable, jsanitize
 from pydantic import BaseModel
-from pydantic.utils import lenient_issubclass
+from pydantic.v1.utils import lenient_issubclass
 
 from jobflow.utils.enum import ValueEnum
 
 if typing.TYPE_CHECKING:
-    import jobflow
+    from collections.abc import Sequence
 
-__all__ = [
-    "OnMissing",
-    "OutputReference",
-    "resolve_references",
-    "find_and_resolve_references",
-    "find_and_get_references",
-]
+    import jobflow
 
 
 class OnMissing(ValueEnum):
@@ -95,14 +89,14 @@ class OutputReference(MSONable):
         uuid: str,
         attributes: tuple[tuple[str, Any], ...] = (),
         output_schema: type[BaseModel] = None,
-    ):
+    ) -> None:
         super().__init__()
         self.uuid = uuid
         self.attributes = attributes
         self.output_schema = output_schema
 
         for attr_type, attr in attributes:
-            if attr_type not in ("a", "i"):
+            if attr_type not in {"a", "i"}:
                 raise ValueError(
                     f"Unrecognised attribute type '{attr_type}' for attribute '{attr}'"
                 )
@@ -165,11 +159,12 @@
         if on_missing == OnMissing.ERROR and index not in cache[self.uuid]:
             istr = f" ({index})" if index is not None else ""
             raise ValueError(
-                f"Could not resolve reference - {self.uuid}{istr} not in store or cache"
+                f"Could not resolve reference - {self.uuid}{istr} not in store or "
+                f"{index=}, {cache=}"
             )
-        elif on_missing == OnMissing.NONE and index not in cache[self.uuid]:
+        if on_missing == OnMissing.NONE and index not in cache[self.uuid]:
             return None
-        elif on_missing == OnMissing.PASS and index not in cache[self.uuid]:
+        if on_missing == OnMissing.PASS and index not in cache[self.uuid]:
             return self
 
         data = cache[self.uuid][index]
@@ -182,7 +177,11 @@
 
         for attr_type, attr in self.attributes:
             # i means index else use attribute access
-            data = data[attr] if attr_type == "i" else getattr(data, attr)
+            data = (
+                data[attr]
+                if attr_type == "i" or isinstance(data, dict)
+                else getattr(data, attr)
+            )
 
         return data
 
@@ -206,12 +205,11 @@
         if inplace:
             self.uuid = uuid
             return self
-        else:
-            from copy import deepcopy
+        from copy import deepcopy
 
-            new_reference = deepcopy(self)
-            new_reference.uuid = uuid
-            return new_reference
+        new_reference = deepcopy(self)
+        new_reference.uuid = uuid
+        return new_reference
 
     def __getitem__(self, item) -> OutputReference:
         """Index the reference."""
@@ -269,7 +267,7 @@
         """Return a hash of the reference."""
         return hash(str(self))
 
-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Test for equality against another reference."""
         if isinstance(other, OutputReference):
             return (
@@ -291,7 +289,7 @@
         """Serialize the reference as a dict."""
         schema = self.output_schema
         schema_dict = MontyEncoder().default(schema) if schema is not None else None
-        data = {
+        return {
             "@module": self.__class__.__module__,
             "@class": type(self).__name__,
             "@version": None,
@@ -299,7 +297,6 @@
             "attributes": self.attributes,
             "output_schema": schema_dict,
         }
-        return data
 
 
 def resolve_references(
@@ -382,7 +379,7 @@ def find_and_get_references(arg: Any) -> tuple[OutputReference, ...]:
         # if the argument is a reference then stop there
         return (arg,)
 
-    elif isinstance(arg, (float, int, str, bool)):
+    if isinstance(arg, (float, int, str, bool)):
         # argument is a primitive, we won't find a reference here
         return ()
 
@@ -438,7 +435,7 @@ def find_and_resolve_references(
         # if the argument is a reference then stop there
         return arg.resolve(store, cache=cache, on_missing=on_missing)
 
-    elif isinstance(arg, (float, int, str, bool)):
+    if isinstance(arg, (float, int, str, bool)):
         # argument is a primitive, we won't find a reference here
         return arg
 
@@ -503,7 +500,7 @@ def validate_schema_access(
         raise AttributeError(f"{schema.__name__} does not have attribute '{item}'.")
 
     subschema = None
-    item_type = schema.__fields__[item].outer_type_
+    item_type = schema.model_fields[item].annotation
     if lenient_issubclass(item_type, BaseModel):
         subschema = item_type
 
jobflow/core/schemas.py ADDED
@@ -0,0 +1,34 @@
+"""A Pydantic model for Jobstore document."""
+
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+
+class JobStoreDocument(BaseModel):
+    """A Pydantic model for Jobstore document."""
+
+    uuid: str = Field(
+        None, description="An unique identifier for the job. Generated automatically."
+    )
+    index: int = Field(
+        None,
+        description="The index of the job (number of times the job has been replaced).",
+    )
+    output: Any = Field(
+        None,
+        description="This is a reference to the future job output.",
+    )
+    completed_at: str = Field(None, description="The time the job was completed.")
+    metadata: dict = Field(
+        None,
+        description="Metadata information supplied by the user.",
+    )
+    hosts: list[str] = Field(
+        None,
+        description="The list of UUIDs of the hosts containing the job.",
+    )
+    name: str = Field(
+        None,
+        description="The name of the job.",
+    )
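This new module replaces the free-form dict previously assembled in `Job.run` with a typed document model. A hypothetical construction, with illustrative field values:

```python
# Illustrative only: the field values below are made up.
from datetime import datetime

from jobflow.core.schemas import JobStoreDocument

doc = JobStoreDocument(
    uuid="abc-123",
    index=1,
    output={"result": 3},
    completed_at=datetime.now().isoformat(),
    metadata={"tag": "example"},
    hosts=[],
    name="add",
)
print(doc.uuid, doc.index)
```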
jobflow/core/state.py CHANGED
@@ -15,12 +15,8 @@ import typing
 from monty.design_patterns import singleton
 
 if typing.TYPE_CHECKING:
-    pass
-
     import jobflow
 
-__all__ = ["CURRENT_JOB"]
-
 
 @singleton
 class State:
jobflow/core/store.py CHANGED
@@ -11,17 +11,19 @@ from jobflow.core.reference import OnMissing
 from jobflow.utils.find import get_root_locations
 
 if typing.TYPE_CHECKING:
+    from collections.abc import Iterator
     from enum import Enum
     from pathlib import Path
-    from typing import Any, Dict, Iterator, List, Optional, Type, Union
+    from typing import Any, Optional, Union
 
     from maggma.core import Sort
 
-    obj_type = Union[str, Enum, Type[MSONable], List[Union[Enum, str, Type[MSONable]]]]
-    save_type = Optional[Dict[str, obj_type]]
-    load_type = Union[bool, Dict[str, Union[bool, obj_type]]]
+    from jobflow.core.schemas import JobStoreDocument
+
+    obj_type = Union[str, Enum, type[MSONable], list[Union[Enum, str, type[MSONable]]]]
+    save_type = Optional[dict[str, obj_type]]
+    load_type = Union[bool, dict[str, Union[bool, obj_type]]]
 
-__all__ = ["JobStore"]
 
 T = typing.TypeVar("T", bound="JobStore")
 
@@ -249,12 +251,11 @@ class JobStore(Store):
         docs = self.query(
             criteria=criteria, properties=properties, load=load, sort=sort, limit=1
         )
-        d = next(docs, None)
-        return d
+        return next(docs, None)
 
     def update(
         self,
-        docs: list[dict] | dict,
+        docs: list[dict] | dict | JobStoreDocument | list[JobStoreDocument],
         key: list | str = None,
         save: bool | save_type = None,
     ):
@@ -264,7 +265,7 @@
         Parameters
         ----------
         docs
-            The document or list of documents to update.
+            The Pydantic document or list of Pydantic documents to update.
         key
             Field name(s) to determine uniqueness for a document, can be a list of
             multiple fields, a single field, or None if the Store's key field is to
@@ -495,7 +496,7 @@ class JobStore(Store):
         # this could be fixed but will require more complicated logic just to
         # catch a very unlikely event.
 
-        if isinstance(which, int) or which in ("last", "first"):
+        if isinstance(which, int) or which in {"last", "first"}:
             sort = -1 if which == "last" else 1
 
             criteria: dict[str, Any] = {"uuid": uuid}
@@ -521,28 +522,27 @@
             return find_and_resolve_references(
                 result["output"], self, cache=cache, on_missing=on_missing
             )
-        else:
-            results = list(
-                self.query(
-                    criteria={"uuid": uuid},
-                    properties=["output"],
-                    sort={"index": 1},
-                    load=load,
-                )
-            )
+        results = list(
+            self.query(
+                criteria={"uuid": uuid},
+                properties=["output"],
+                sort={"index": 1},
+                load=load,
+            )
+        )
 
-            if len(results) == 0:
-                raise ValueError(f"UUID: {uuid} has no outputs.")
+        if len(results) == 0:
+            raise ValueError(f"UUID: {uuid} has no outputs.")
 
-            results = [r["output"] for r in results]
+        results = [r["output"] for r in results]
 
-            refs = find_and_get_references(results)
-            if any(ref.uuid == uuid for ref in refs):
-                raise RuntimeError("Reference cycle detected - aborting.")
+        refs = find_and_get_references(results)
+        if any(ref.uuid == uuid for ref in refs):
+            raise RuntimeError("Reference cycle detected - aborting.")
 
-            return find_and_resolve_references(
-                results, self, cache=cache, on_missing=on_missing
-            )
+        return find_and_resolve_references(
+            results, self, cache=cache, on_missing=on_missing
+        )
 
     @classmethod
     def from_file(cls: type[T], db_file: str | Path, **kwargs) -> T:
@@ -661,6 +661,10 @@
 
         all_stores = {s.__name__: s for s in all_subclasses(maggma.stores.Store)}
 
+        # add ssh tunnel support
+        tunnel = maggma.stores.ssh_tunnel.SSHTunnel
+        all_stores[tunnel.__name__] = tunnel
+
         docs_store_info = spec["docs_store"]
         docs_store = _construct_store(docs_store_info, all_stores)
 
@@ -760,7 +764,7 @@ def _filter_blobs(
 
     new_blobs = []
     new_locations = []
-    for _store_name, store_load in load.items():
+    for store_load in load.values():
         for blob, location in zip(blob_infos, locations):
             if store_load is True:
                 new_blobs.append(blob)
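Per the widened `docs` annotation above, `JobStore.update` now accepts `JobStoreDocument` instances (or lists of them) as well as plain dicts, mirroring the call in `Job.run`. A sketch using an in-memory docs store; the document values are illustrative:

```python
from maggma.stores import MemoryStore

from jobflow import JobStore
from jobflow.core.schemas import JobStoreDocument

store = JobStore(MemoryStore())  # minimal store, for illustration only
store.connect()
doc = JobStoreDocument(uuid="abc-123", index=1, output=3, name="add")
store.update(doc, key=["uuid", "index"])
```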
jobflow/managers/fireworks.py CHANGED
@@ -7,11 +7,10 @@
 from fireworks import FiretaskBase, Firework, FWAction, Workflow, explicit_serialize
 
 if typing.TYPE_CHECKING:
-    from typing import Sequence
+    from collections.abc import Sequence
 
     import jobflow
-
-__all__ = ["flow_to_workflow", "job_to_firework", "JobFiretask"]
+    from jobflow.core.job import Job
 
 
 def flow_to_workflow(
@@ -148,7 +147,6 @@ class JobFiretask(FiretaskBase):
     def run_task(self, fw_spec):
         """Run the job and handle any dynamic firework submissions."""
         from jobflow import SETTINGS, initialize_logger
-        from jobflow.core.job import Job
 
         job: Job = self.get("job")
         store = self.get("store")
@@ -192,11 +190,10 @@
         else:
             detours = [detour_wf]
 
-        fwa = FWAction(
+        return FWAction(
             stored_data=response.stored_data,
             detours=detours,
             additions=additions,
             defuse_workflow=response.stop_jobflow,
             defuse_children=response.stop_children,
         )
-        return fwa
jobflow/managers/local.py CHANGED
@@ -6,12 +6,10 @@ import logging
 import typing
 
 if typing.TYPE_CHECKING:
-    pass
+    from pathlib import Path
 
     import jobflow
 
-__all__ = ["run_locally"]
-
 logger = logging.getLogger(__name__)
 
@@ -20,6 +18,7 @@ def run_locally(
     log: bool = True,
     store: jobflow.JobStore = None,
     create_folders: bool = False,
+    root_dir: str | Path | None = None,
     ensure_success: bool = False,
     allow_external_references: bool = False,
 ) -> dict[str, dict[int, jobflow.Response]]:
@@ -28,25 +27,29 @@
 
     Parameters
    ----------
-    flow
+    flow : Flow | Job | list[Job]
        A job or flow.
-    log
+    log : bool
        Whether to print log messages.
-    store
+    store : JobStore
        A job store. If a job store is not specified then
        :obj:`JobflowSettings.JOB_STORE` will be used. By default this is a maggma
        ``MemoryStore`` but can be customised by setting the jobflow configuration file.
-    create_folders
+    create_folders : bool
        Whether to run each job in a new folder.
-    ensure_success
+    root_dir : str | Path | None
+        The root directory to run the jobs in or where to create new subfolders if
+        ``create_folders`` is True. If None then the current working
+        directory will be used.
+    ensure_success : bool
        Raise an error if the flow was not executed successfully.
-    allow_external_references
+    allow_external_references : bool
        If False all the references to other outputs should be from other Jobs
        of the Flow.
 
    Returns
    -------
-    Dict[str, Dict[int, Response]]
+    dict[str, dict[int, Response]]
        The responses of the jobs, as a dict of ``{uuid: {index: response}}``.
    """
    from collections import defaultdict
@@ -63,6 +66,9 @@
    if store is None:
        store = SETTINGS.JOB_STORE
 
+    root_dir = Path.cwd() if root_dir is None else Path(root_dir).resolve()
+    root_dir.mkdir(exist_ok=True)
+
    store.connect()
 
    if log:
@@ -75,13 +81,11 @@
    responses: dict[str, dict[int, jobflow.Response]] = defaultdict(dict)
    stop_jobflow = False
 
-    root_dir = Path.cwd()
-
    def _run_job(job: jobflow.Job, parents):
        nonlocal stop_jobflow
 
        if stop_jobflow:
-            return False
+            return None, True
 
        if len(set(parents).intersection(stopped_parents)) > 0:
            # stop children has been called for one of the jobs' parents
@@ -89,14 +93,14 @@
                f"{job.name} is a child of a job with stop_children=True, skipping..."
            )
            stopped_parents.add(job.uuid)
-            return
+            return None, False
 
        if (
            len(set(parents).intersection(errored)) > 0
            and job.config.on_missing_references == OnMissing.ERROR
        ):
            errored.add(job.uuid)
-            return
+            return None, False
 
        try:
            response = job.run(store=store)
@@ -105,7 +109,7 @@
 
            logger.info(f"{job.name} failed with exception:\n{traceback.format_exc()}")
            errored.add(job.uuid)
-            return
+            return None, False
 
        responses[job.uuid][job.index] = response
 
@@ -117,21 +121,24 @@
 
        if response.stop_jobflow:
            stop_jobflow = True
-            return False
+            return None, True
 
+        diversion_responses = []
        if response.replace is not None:
            # first run any restarts
-            _run(response.replace)
+            diversion_responses.append(_run(response.replace))
 
        if response.detour is not None:
            # next any detours
-            _run(response.detour)
+            diversion_responses.append(_run(response.detour))
 
        if response.addition is not None:
            # finally any additions
-            _run(response.addition)
+            diversion_responses.append(_run(response.addition))
 
-        return response
+        if not all(diversion_responses):
+            return None, False
+        return response, False
 
    def _get_job_dir():
        if create_folders:
@@ -139,19 +146,20 @@
            job_dir = root_dir / f"job_{time_now}-{randint(10000, 99999)}"
            job_dir.mkdir()
            return job_dir
-        else:
-            return root_dir
+        return root_dir
 
    def _run(root_flow):
-        job: jobflow.Job
+        encountered_bad_response = False
        for job, parents in root_flow.iterflow():
            job_dir = _get_job_dir()
            with cd(job_dir):
-                response = _run_job(job, parents)
-            if response is False:
+                response, jobflow_stopped = _run_job(job, parents)
+
+            encountered_bad_response = encountered_bad_response or response is None
+            if jobflow_stopped is True:
                return False
 
-        return response is not None
+        return not encountered_bad_response
 
    logger.info("Started executing jobs locally")
    finished_successfully = _run(flow)
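A sketch of the new `root_dir` option (the path and the `add` job are illustrative): jobs now run in `root_dir`, or in per-job subfolders beneath it when `create_folders=True`, instead of always using the current working directory:

```python
from jobflow import Flow, job
from jobflow.managers.local import run_locally


@job
def add(a, b):
    return a + b


flow = Flow([add(1, 2)])
# "my_jobs" is created if missing; each job runs in a job_<timestamp>-<rand> subfolder
responses = run_locally(flow, create_folders=True, root_dir="my_jobs")
```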
jobflow/settings.py CHANGED
@@ -1,17 +1,17 @@
 """Settings for jobflow."""
 
+import warnings
 from collections import defaultdict
 from pathlib import Path
 
 from maggma.stores import MemoryStore
-from pydantic import BaseSettings, Field, root_validator
+from pydantic import Field, model_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from jobflow import JobStore
 
 DEFAULT_CONFIG_FILE_PATH = Path("~/.jobflow.yaml").expanduser().as_posix()
 
-__all__ = ["JobflowSettings"]
-
 
 def _default_additional_store():
     """Create a default MemoryStore and connect it.
@@ -117,13 +117,10 @@ class JobflowSettings(BaseSettings):
         "%Y-%m-%d-%H-%M-%S-%f",
         description="Date stamp format used to create directories",
     )
+    model_config = SettingsConfigDict(env_prefix="jobflow_")
 
-    class Config:
-        """Pydantic config settings."""
-
-        env_prefix = "jobflow_"
-
-    @root_validator(pre=True)
+    @model_validator(mode="before")
+    @classmethod
     def load_default_settings(cls, values):
         """
         Load settings from file or environment variables.
@@ -136,10 +133,21 @@
         from monty.serialization import loadfn
 
         config_file_path: str = values.get("CONFIG_FILE", DEFAULT_CONFIG_FILE_PATH)
-
         new_values = {}
         if Path(config_file_path).exists():
-            new_values.update(loadfn(config_file_path))
+            if Path(config_file_path).stat().st_size == 0:
+                warnings.warn(
+                    f"An empty JobFlow config file was located at {config_file_path}"
+                )
+            else:
+                try:
+                    new_values.update(loadfn(config_file_path))
+                except ValueError:
+                    raise ValueError(
+                        f"A JobFlow configuration file was located at "
+                        f"{config_file_path} but a problem was "
+                        f"encountered while parsing it."
+                    ) from None
 
         store = new_values.get("JOB_STORE")
         if isinstance(store, str):
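With the pydantic v2 migration, the inner `Config` class becomes `model_config = SettingsConfigDict(env_prefix="jobflow_")`, so settings can still be overridden through `JOBFLOW_`-prefixed environment variables. An illustrative sketch (the override must be in place before the settings object is created):

```python
import os

# hypothetical override; any JobflowSettings field can be set this way
os.environ["JOBFLOW_DIRECTORY_FORMAT"] = "%Y-%m-%d"

from jobflow.settings import JobflowSettings

settings = JobflowSettings()
print(settings.DIRECTORY_FORMAT)  # -> %Y-%m-%d
```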
jobflow/utils/dict_mods.py CHANGED
@@ -21,9 +21,6 @@ if typing.TYPE_CHECKING:
     from typing import Any
 
 
-__all__ = ["DictMods", "apply_mod"]
-
-
 class DictMods:
     """
     Class to define mongo-like modifications on a dict.
jobflow/utils/enum.py CHANGED
@@ -2,8 +2,6 @@
 
 from enum import Enum
 
-__all__ = ["ValueEnum"]
-
 
 class ValueEnum(Enum):
     """Enum that serializes to string as the value and can be compared against a str."""
@@ -16,8 +14,7 @@ class ValueEnum(Enum):
         """Compare to another enum for equality."""
         if type(self) == type(other) and self.value == other.value:
             return True
-        else:
-            return str(self.value) == str(other)
+        return str(self.value) == str(other)
 
     def __hash__(self):
         """Get a hash of the enum."""
jobflow/utils/find.py CHANGED
@@ -5,17 +5,11 @@ from __future__ import annotations
 
 import typing
 
 if typing.TYPE_CHECKING:
-    from typing import Any, Hashable
+    from collections.abc import Hashable
+    from typing import Any
 
     from monty.json import MSONable
 
-__all__ = [
-    "find_key",
-    "find_key_value",
-    "update_in_dictionary",
-    "contains_flow_or_job",
-]
-
 
 def find_key(
     d: dict[Hashable, Any] | list[Any],
@@ -78,10 +72,8 @@
         if (
             inspect.isclass(key)
             and issubclass(key, MSONable)
-            and "@module" in obj
-            and obj["@module"] == key.__module__
-            and "@class" in obj
-            and obj["@class"] == key.__name__
+            and obj.get("@module") == key.__module__
+            and obj.get("@class") == key.__name__
         ):
             found_items.add(path)
             found = True
@@ -209,7 +201,7 @@ def contains_flow_or_job(obj: Any) -> bool:
         # if the argument is an flow or job then stop there
         return True
 
-    elif isinstance(obj, (float, int, str, bool)):
+    if isinstance(obj, (float, int, str, bool)):
         # argument is a primitive, we won't find an flow or job here
         return False
 
jobflow/utils/graph.py CHANGED
@@ -8,19 +8,15 @@ import networkx as nx
 from monty.dev import requires
 
 try:
-    import matplotlib
+    import matplotlib as mpl
 except ImportError:
-    matplotlib = None
+    mpl = None
 
 import typing
 
 if typing.TYPE_CHECKING:
-    pass
-
     import jobflow
 
-__all__ = ["itergraph", "draw_graph", "to_pydot", "to_mermaid"]
-
 
 def itergraph(graph: nx.DiGraph):
     """
@@ -56,7 +52,7 @@ def itergraph(graph: nx.DiGraph):
         yield from nx.topological_sort(subgraph)
 
 
-@requires(matplotlib, "matplotlib must be installed to plot flow graphs.")
+@requires(mpl, "matplotlib must be installed to plot flow graphs.")
 def draw_graph(
     graph: nx.DiGraph,
     layout_function: typing.Callable = None,
@@ -155,20 +151,20 @@ def to_pydot(flow: jobflow.Flow):
     nx_graph = flow.graph
     pydot_graph = pydot.Dot(f'"{flow.name}"', graph_type="digraph")
 
-    for n, nodedata in nx_graph.nodes(data=True):
-        str_nodedata = {k: str(v) for k, v in nodedata.items()}
-        p = pydot.Node(str(n), **str_nodedata)
+    for n, node_data in nx_graph.nodes(data=True):
+        str_node_data = {k: str(v) for k, v in node_data.items()}
+        p = pydot.Node(str(n), **str_node_data)
         pydot_graph.add_node(p)
 
-    for u, v, edgedata in nx_graph.edges(data=True):
-        str_edgedata = {k: str(v) for k, v in edgedata.items()}
-        edge = pydot.Edge(str(u), str(v), label=str_edgedata["properties"])
+    for u, v, edge_data in nx_graph.edges(data=True):
+        str_edge_data = {k: str(v) for k, v in edge_data.items()}
+        edge = pydot.Edge(str(u), str(v), label=str_edge_data["properties"])
         pydot_graph.add_edge(edge)
 
     def add_cluster(nested_flow, outer_graph):
         cluster = pydot.Cluster(nested_flow.uuid)
         cluster.set_label(nested_flow.name)
-        for job in nested_flow.jobs:
+        for job in nested_flow:
             if isinstance(job, Flow):
                 add_cluster(job, cluster)
             else:
@@ -236,7 +232,7 @@ def to_mermaid(flow: jobflow.Flow | jobflow.Job, show_flow_boxes: bool = False)
     def add_subgraph(nested_flow, indent_level=1):
         prefix = " " * indent_level
 
-        for job in nested_flow.jobs:
+        for job in nested_flow:
             if isinstance(job, Flow):
                 if show_flow_boxes:
                     lines.append(f"{prefix}subgraph {job.uuid} [{job.name}]")
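A sketch of the mermaid export whose iteration changed above; `to_mermaid` returns the flowchart source as a string (the `add` job is illustrative):

```python
from jobflow import Flow, job
from jobflow.utils.graph import to_mermaid


@job
def add(a, b):
    return a + b


flow = Flow([add(1, 2), add(3, 4)])
print(to_mermaid(flow, show_flow_boxes=True))  # mermaid flowchart definition
```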
jobflow/utils/log.py CHANGED
@@ -2,8 +2,6 @@
 
 import logging
 
-__all__ = ["initialize_logger"]
-
 
 def initialize_logger(level: int = logging.INFO) -> logging.Logger:
     """Initialize the default logger.
jobflow-0.1.13.dist-info/METADATA → jobflow-0.1.15.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jobflow
-Version: 0.1.13
+Version: 0.1.15
 Summary: jobflow is a library for writing computational workflows
 Author-email: Alex Ganose <alexganose@gmail.com>
 License: modified BSD
@@ -9,64 +9,67 @@ Project-URL: repository, https://github.com/materialsproject/jobflow
 Project-URL: documentation, https://materialsproject.github.io/jobflow/
 Project-URL: changelog, https://github.com/materialsproject/jobflow/blob/main/CHANGELOG.md
 Keywords: high-throughput,workflow
-Classifier: Development Status :: 2 - Pre-Alpha
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Information Technology
 Classifier: Intended Audience :: Science/Research
 Classifier: Intended Audience :: System Administrators
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Topic :: Database :: Front-Ends
 Classifier: Topic :: Other/Nonlisted Topic
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: PyYAML
-Requires-Dist: maggma >=0.38.1
-Requires-Dist: monty >=2021.5.9
+Requires-Dist: maggma >=0.57.0
+Requires-Dist: monty >=2023.9.25
 Requires-Dist: networkx
-Requires-Dist: pydantic
+Requires-Dist: pydantic-settings >=2.0.3
+Requires-Dist: pydantic >=2.0.1
 Requires-Dist: pydash
 Provides-Extra: dev
 Requires-Dist: pre-commit >=2.12.1 ; extra == 'dev'
 Provides-Extra: docs
-Requires-Dist: autodoc-pydantic ==1.9.0 ; extra == 'docs'
-Requires-Dist: furo ==2023.7.26 ; extra == 'docs'
-Requires-Dist: ipython ==8.14.0 ; extra == 'docs'
+Requires-Dist: autodoc-pydantic ==2.0.1 ; extra == 'docs'
+Requires-Dist: furo ==2023.9.10 ; extra == 'docs'
+Requires-Dist: ipython ==8.18.1 ; extra == 'docs'
 Requires-Dist: myst-parser ==2.0.0 ; extra == 'docs'
-Requires-Dist: nbsphinx ==0.9.2 ; extra == 'docs'
+Requires-Dist: nbsphinx ==0.9.3 ; extra == 'docs'
 Requires-Dist: sphinx-copybutton ==0.5.2 ; extra == 'docs'
-Requires-Dist: sphinx ==7.1.2 ; extra == 'docs'
+Requires-Dist: sphinx ==7.2.6 ; extra == 'docs'
 Provides-Extra: fireworks
 Requires-Dist: FireWorks ; extra == 'fireworks'
 Provides-Extra: strict
 Requires-Dist: FireWorks ==2.0.3 ; extra == 'strict'
 Requires-Dist: PyYAML ==6.0.1 ; extra == 'strict'
-Requires-Dist: maggma ==0.53.0 ; extra == 'strict'
-Requires-Dist: matplotlib ==3.7.2 ; extra == 'strict'
-Requires-Dist: monty ==2023.8.8 ; extra == 'strict'
-Requires-Dist: moto ==4.1.14 ; extra == 'strict'
-Requires-Dist: networkx ==3.1 ; extra == 'strict'
-Requires-Dist: pydantic ==1.10.9 ; extra == 'strict'
+Requires-Dist: maggma ==0.58.0 ; extra == 'strict'
+Requires-Dist: matplotlib ==3.8.2 ; extra == 'strict'
+Requires-Dist: monty ==2023.11.3 ; extra == 'strict'
+Requires-Dist: moto ==4.2.11 ; extra == 'strict'
+Requires-Dist: networkx ==3.2.1 ; extra == 'strict'
+Requires-Dist: pydantic-settings ==2.1.0 ; extra == 'strict'
+Requires-Dist: pydantic ==2.5.2 ; extra == 'strict'
 Requires-Dist: pydash ==7.0.6 ; extra == 'strict'
 Requires-Dist: pydot ==1.4.2 ; extra == 'strict'
-Requires-Dist: typing-extensions ==4.7.1 ; extra == 'strict'
+Requires-Dist: typing-extensions ==4.8.0 ; extra == 'strict'
 Provides-Extra: tests
+Requires-Dist: moto ==4.2.11 ; extra == 'tests'
 Requires-Dist: pytest-cov ==4.1.0 ; extra == 'tests'
-Requires-Dist: pytest ==7.4.0 ; extra == 'tests'
+Requires-Dist: pytest ==7.4.3 ; extra == 'tests'
 Provides-Extra: vis
 Requires-Dist: matplotlib ; extra == 'vis'
 Requires-Dist: pydot ; extra == 'vis'
 
 # jobflow
 
-<a href="https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting"><img alt="code coverage" src="https://img.shields.io/github/actions/workflow/status/materialsproject/jobflow/testing.yml?branch=main&label=tests"></a>
-<a href="https://codecov.io/gh/materialsproject/jobflow/"><img alt="code coverage" src="https://img.shields.io/codecov/c/gh/materialsproject/jobflow/main"></a>
-<a href="https://pypi.org/project/jobflow"><img alt="pypi version" src="https://img.shields.io/pypi/v/jobflow?color=blue"></a>
-<img alt="supported python versions" src="https://img.shields.io/pypi/pyversions/jobflow">
+[![tests](https://img.shields.io/github/actions/workflow/status/materialsproject/jobflow/testing.yml?branch=main&label=tests)](https://github.com/materialsproject/jobflow/actions?query=workflow%3Atesting)
+[![code coverage](https://img.shields.io/codecov/c/gh/materialsproject/jobflow/main)](https://codecov.io/gh/materialsproject/jobflow/)
+[![pypi version](https://img.shields.io/pypi/v/jobflow?color=blue)](https://pypi.org/project/jobflow/)
+![supported python versions](https://img.shields.io/pypi/pyversions/jobflow)
 
 [Documentation](https://materialsproject.github.io/jobflow/) | [PyPI](https://pypi.org/project/jobflow/) | [GitHub](https://github.com/materialsproject/jobflow)
 
@@ -139,7 +142,7 @@ the jobs is determined automatically and can be visualised using the flow graph.
 
 ## Installation
 
-The jobflow is a Python 3.8+ library and can be installed using pip.
+`jobflow` is a Python 3.9+ library and can be installed using `pip`.
 
 ```bash
 pip install jobflow
jobflow-0.1.15.dist-info/RECORD ADDED
@@ -0,0 +1,27 @@
+jobflow/__init__.py,sha256=l7o10BaqEQWw5aZziWRg40PsIAgQ4lrlluXs9hIv2mg,570
+jobflow/_version.py,sha256=Ym07PBD7sAmpqVpX8tuzWma3P_Hv6KXbDKXWkw8OwaI,205
+jobflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jobflow/settings.py,sha256=e_8QwC05MUkDRenAutnqg8eAy7pQ-firtmCob7BKA3g,5474
+jobflow/core/__init__.py,sha256=3sx5t1Gysejc4c_fPrhvCjPUg0p_384Zko8ms2c_NnY,98
+jobflow/core/flow.py,sha256=c_rG9lzOQ00ckzN40gDHLm5CCvFlAeVE75kKLvvdJ2U,28879
+jobflow/core/job.py,sha256=bPZMcmiW-Qqs-qNnlypbwZCgS_IN9kM0sybYw3koXOw,46466
+jobflow/core/maker.py,sha256=DKfFXe91v9rRFtgUrm8FMiEcLArmb6jnK7nysuDdZts,11185
+jobflow/core/reference.py,sha256=x6RXt-yxbQHoea2gb5SR4GWBVD1U2LaNwPxUGPdvGZo,16252
+jobflow/core/schemas.py,sha256=Oi5-PnZpI8S9jSY7Q4f8H7xUybbRZDXlgugeVewVsrA,968
+jobflow/core/state.py,sha256=IGJTtmpotDKEcgDEnsT5x20ZeyvQT68Mr3teTjkgYnM,709
+jobflow/core/store.py,sha256=Bdm92-NqCPpID183DV9F9pMnG9PQLMpFOKUJkbzccNo,26982
+jobflow/managers/__init__.py,sha256=KkA5cVDe2os2_2aTa8eiB9SnkGLZNybcci-Lo4tbaWM,55
+jobflow/managers/fireworks.py,sha256=5IKDkE-dppvbhDWTfJKCMmqvxg50zBgCqm6qUqsVZtc,6654
+jobflow/managers/local.py,sha256=J2GHodXMqryME0EfccJTGgbjXuuwUmtrVA9RnCkDcy8,5355
+jobflow/utils/__init__.py,sha256=meuvfuk05U594rx4YB6BoBnoQxBMjCA2hKX3TSfZsB8,328
+jobflow/utils/dict_mods.py,sha256=g50aMw-mK3RjXp_hHJBR9xUaWRYXoqqmPTMCPDDluz4,6052
+jobflow/utils/enum.py,sha256=rFDdqQr-844Vzj9G9vuzPTWAJG60pQVNzgjyugkNwtc,713
+jobflow/utils/find.py,sha256=Qaxh0TxQtXznK4Wy72klLFfFB3NUfUI4TF6-NEAzxcU,6162
+jobflow/utils/graph.py,sha256=CNZNlUPmkVKio7m9Y3nifM9BbXe8vtvFHuXRwJu84R0,6562
+jobflow/utils/log.py,sha256=tIMpsI4JTlkpxjBZfWqZ0qkEkIxk1-RBasz8JhDcF7E,692
+jobflow/utils/uuid.py,sha256=lVgo8e8gUB7HLSR0H_9uZH-OPkVBaOT39atAnNKYAaI,268
+jobflow-0.1.15.dist-info/LICENSE,sha256=jUEiENfZNQZh9RE9ixtUWgVkLRD85ScZ6iv1WREf19w,2418
+jobflow-0.1.15.dist-info/METADATA,sha256=hFAXrVdF_OJfoi33zxO1DeIbMT9Ky-DJpy5xsES0fMY,9050
+jobflow-0.1.15.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+jobflow-0.1.15.dist-info/top_level.txt,sha256=IanNooU88OupQPDrWnT0rbL3E27P2wEy7Jsfx9_j8zc,8
+jobflow-0.1.15.dist-info/RECORD,,
jobflow-0.1.13.dist-info/WHEEL → jobflow-0.1.15.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.1)
+Generator: bdist_wheel (0.42.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
jobflow-0.1.13.dist-info/RECORD DELETED
@@ -1,26 +0,0 @@
-jobflow/__init__.py,sha256=l7o10BaqEQWw5aZziWRg40PsIAgQ4lrlluXs9hIv2mg,570
-jobflow/_version.py,sha256=ERuIgO0lzbMrVMSXNAE55GxFc5JHDdf1CW7GtoXJ67Q,206
-jobflow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jobflow/settings.py,sha256=JHyTq9AOzMjSN5ohpLeooMLM0ddO2nPMuHuOyH62X6E,4908
-jobflow/core/__init__.py,sha256=3sx5t1Gysejc4c_fPrhvCjPUg0p_384Zko8ms2c_NnY,98
-jobflow/core/flow.py,sha256=LqIzSFa3NuLCJLIj7LJeF4WaeR5fya0w24v3fGsR6IM,28907
-jobflow/core/job.py,sha256=15Y3VW8CiDdBN6YxuSk78HvxGnKKW2OSTLVE3AHdtAY,46376
-jobflow/core/maker.py,sha256=cdEH7JE-LOyenibj_6y6N0y1bFe0_xbphTz8toigzzI,11206
-jobflow/core/reference.py,sha256=i9vnM13Ax7_J-RXmXQuYTnacCsc6pNpbCPxI59RU3O0,16287
-jobflow/core/state.py,sha256=qhmrxZ-dWFQV1QZH-FcMAEajYl-Zxue5x8d8xQQO0r0,746
-jobflow/core/store.py,sha256=6pveNBfiBYN6Zpp-PM3GfNWWzH187aVfJ6pyrbQ0j-M,26852
-jobflow/managers/__init__.py,sha256=KkA5cVDe2os2_2aTa8eiB9SnkGLZNybcci-Lo4tbaWM,55
-jobflow/managers/fireworks.py,sha256=VEXsu2bghScl7fH7LQ-FbZOFLeFRCFRu_SRusmmHZ9A,6733
-jobflow/managers/local.py,sha256=rIqYYkF-HEmO2qc5sSbIhkSIA5zlpkH0sRV0mQAt348,4593
-jobflow/utils/__init__.py,sha256=meuvfuk05U594rx4YB6BoBnoQxBMjCA2hKX3TSfZsB8,328
-jobflow/utils/dict_mods.py,sha256=bJmHB-JHtwBwEpWroO1YaOac_AvCItoQ6GBWlWdDMtk,6090
-jobflow/utils/enum.py,sha256=mt8z0D2Nu2oQxZ1vXi6VTqorOMseRptSCjsgMNSvvOE,756
-jobflow/utils/find.py,sha256=dGnXKJ_VDmWKxMBtJo_Dnb9jd3JVcPu7HasZsd6-jmI,6307
-jobflow/utils/graph.py,sha256=61k0bA_MCqlkVtJ2ldwpH6Tx2cwXrE_NWveouh6zwTk,6646
-jobflow/utils/log.py,sha256=dNIOvhApCtW7z1OamLlUmg6TL4mDXOQ0yFQxD8AM0Lk,725
-jobflow/utils/uuid.py,sha256=lVgo8e8gUB7HLSR0H_9uZH-OPkVBaOT39atAnNKYAaI,268
-jobflow-0.1.13.dist-info/LICENSE,sha256=jUEiENfZNQZh9RE9ixtUWgVkLRD85ScZ6iv1WREf19w,2418
-jobflow-0.1.13.dist-info/METADATA,sha256=Aye0TI6H8FgmQgvSmTLXIUFPxwJ3p7D_z1DwCUbRuAM,8976
-jobflow-0.1.13.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
-jobflow-0.1.13.dist-info/top_level.txt,sha256=IanNooU88OupQPDrWnT0rbL3E27P2wEy7Jsfx9_j8zc,8
-jobflow-0.1.13.dist-info/RECORD,,