python-workflow-definition 0.1.0__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (19) hide show
  1. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/.gitignore +2 -2
  2. python_workflow_definition-0.1.3/LICENSE +29 -0
  3. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/PKG-INFO +12 -8
  4. python_workflow_definition-0.1.3/pyproject.toml +63 -0
  5. python_workflow_definition-0.1.3/src/python_workflow_definition/__init__.py +3 -0
  6. python_workflow_definition-0.1.3/src/python_workflow_definition/_version.py +34 -0
  7. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/aiida.py +30 -24
  8. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/executorlib.py +1 -1
  9. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/models.py +19 -2
  10. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/pyiron_base.py +21 -7
  11. python_workflow_definition-0.1.3/src/python_workflow_definition/pyiron_workflow.py +407 -0
  12. python_workflow_definition-0.1.0/pyproject.toml +0 -31
  13. python_workflow_definition-0.1.0/src/python_workflow_definition/__init__.py +0 -0
  14. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/cwl/__init__.py +0 -0
  15. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/cwl/__main__.py +0 -0
  16. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/jobflow.py +0 -0
  17. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/plot.py +0 -0
  18. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/purepython.py +0 -0
  19. {python_workflow_definition-0.1.0 → python_workflow_definition-0.1.3}/src/python_workflow_definition/shared.py +0 -0
@@ -178,7 +178,7 @@ cython_debug/
178
178
  input_tmp.in
179
179
  pyiron.log
180
180
  pyiron_draw.png
181
- python_workflow_definition/src/python_workflow_definition/__pycache__/
181
+ src/python_workflow_definition/__pycache__/
182
182
  test/
183
183
  mini/
184
184
  evcurve.png
@@ -199,4 +199,4 @@ jobflow_to_aiida_qe.json
199
199
  aiida_to_jobflow_qe.json
200
200
  pyiron_base_to_aiida_simple.json
201
201
  pyiron_base_to_jobflow_qe.json
202
-
202
+ **/*.h5
@@ -0,0 +1,29 @@
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2025, Jan Janssen
4
+ All rights reserved.
5
+
6
+ Redistribution and use in source and binary forms, with or without
7
+ modification, are permitted provided that the following conditions are met:
8
+
9
+ * Redistributions of source code must retain the above copyright notice, this
10
+ list of conditions and the following disclaimer.
11
+
12
+ * Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+
16
+ * Neither the name of the copyright holder nor the names of its
17
+ contributors may be used to endorse or promote products derived from
18
+ this software without specific prior written permission.
19
+
20
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: python_workflow_definition
3
- Version: 0.1.0
3
+ Version: 0.1.3
4
4
  Summary: Python Workflow Definition - workflow interoperability for aiida, jobflow and pyiron
5
5
  Author-email: Jan Janssen <janssen@mpie.de>, Janine George <janine.geogre@bam.de>, Julian Geiger <julian.geiger@psi.ch>, Xing Wang <xing.wang@psi.ch>, Marnik Bercx <marnik.bercx@psi.ch>, Christina Ertural <christina.ertural@bam.de>
6
6
  License: BSD 3-Clause License
@@ -32,12 +32,16 @@ License: BSD 3-Clause License
32
32
  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
33
33
  OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
34
34
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35
- Requires-Dist: aiida-workgraph<=0.5.2,>=0.5.1
36
- Requires-Dist: jobflow<=0.1.19,>=0.1.18
37
- Requires-Dist: numpy<2,>=1.21
38
- Requires-Dist: pydantic<=2.11.4,>=2.7.0
39
- Requires-Dist: pyiron-base<=0.11.11,>=0.11.10
35
+ License-File: LICENSE
36
+ Requires-Dist: numpy>=1.21
37
+ Requires-Dist: pydantic<=2.12.4,>=2.7.0
38
+ Provides-Extra: aiida
39
+ Requires-Dist: aiida-workgraph<=0.7.6,>=0.5.1; extra == 'aiida'
40
+ Provides-Extra: jobflow
41
+ Requires-Dist: jobflow<=0.2.1,>=0.1.18; extra == 'jobflow'
40
42
  Provides-Extra: plot
41
- Requires-Dist: ipython<=9.0.2,>=7.33.0; extra == 'plot'
42
- Requires-Dist: networkx<=3.4.2,>=2.8.8; extra == 'plot'
43
+ Requires-Dist: ipython<=9.8.0,>=7.33.0; extra == 'plot'
44
+ Requires-Dist: networkx<=3.5,>=2.8.8; extra == 'plot'
43
45
  Requires-Dist: pygraphviz<=1.14,>=1.10; extra == 'plot'
46
+ Provides-Extra: pyiron
47
+ Requires-Dist: pyiron-base<=0.15.12,>=0.11.10; extra == 'pyiron'
@@ -0,0 +1,63 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "python_workflow_definition"
7
+ description = "Python Workflow Definition - workflow interoperability for aiida, jobflow and pyiron"
8
+ authors = [
9
+ { name = "Jan Janssen", email = "janssen@mpie.de" },
10
+ { name = "Janine George", email = "janine.geogre@bam.de" },
11
+ { name = "Julian Geiger", email = "julian.geiger@psi.ch" },
12
+ { name = "Xing Wang", email = "xing.wang@psi.ch" },
13
+ { name = "Marnik Bercx", email = "marnik.bercx@psi.ch" },
14
+ { name = "Christina Ertural", email = "christina.ertural@bam.de" },
15
+ ]
16
+ license = { file = "LICENSE" }
17
+ dependencies = [
18
+ "numpy>=1.21",
19
+ "pydantic>=2.7.0,<=2.12.4",
20
+ ]
21
+ dynamic = ["version"]
22
+
23
+ [project.optional-dependencies]
24
+ aiida = [
25
+ "aiida-workgraph>=0.5.1,<=0.7.6",
26
+ ]
27
+ jobflow = [
28
+ "jobflow>=0.1.18,<=0.2.1",
29
+ ]
30
+ pyiron = [
31
+ "pyiron_base>=0.11.10,<=0.15.12",
32
+ ]
33
+ plot = [
34
+ "pygraphviz>=1.10,<=1.14",
35
+ "networkx>=2.8.8,<=3.5",
36
+ "ipython>=7.33.0,<=9.8.0",
37
+ ]
38
+
39
+ [tool.hatch.build]
40
+ include = [
41
+ "src/python_workflow_definition"
42
+ ]
43
+
44
+ [tool.hatch.build.hooks.vcs]
45
+ version-file = "src/python_workflow_definition/_version.py"
46
+
47
+ [tool.hatch.build.targets.sdist]
48
+ include = [
49
+ "src/python_workflow_definition"
50
+ ]
51
+
52
+ [tool.hatch.build.targets.wheel]
53
+ packages = [
54
+ "src/python_workflow_definition"
55
+ ]
56
+
57
+ [tool.hatch.version]
58
+ source = "vcs"
59
+ path = "src/python_workflow_definition/_version.py"
60
+
61
+ [tool.coverage.run]
62
+ source = ["python_workflow_definition"]
63
+ command_line = "-m unittest discover tests"
@@ -0,0 +1,3 @@
1
+ import python_workflow_definition._version
2
+
3
+ __version__ = python_workflow_definition._version.__version__
@@ -0,0 +1,34 @@
1
+ # file generated by setuptools-scm
2
+ # don't change, don't track in version control
3
+
4
+ __all__ = [
5
+ "__version__",
6
+ "__version_tuple__",
7
+ "version",
8
+ "version_tuple",
9
+ "__commit_id__",
10
+ "commit_id",
11
+ ]
12
+
13
+ TYPE_CHECKING = False
14
+ if TYPE_CHECKING:
15
+ from typing import Tuple
16
+ from typing import Union
17
+
18
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
19
+ COMMIT_ID = Union[str, None]
20
+ else:
21
+ VERSION_TUPLE = object
22
+ COMMIT_ID = object
23
+
24
+ version: str
25
+ __version__: str
26
+ __version_tuple__: VERSION_TUPLE
27
+ version_tuple: VERSION_TUPLE
28
+ commit_id: COMMIT_ID
29
+ __commit_id__: COMMIT_ID
30
+
31
+ __version__ = version = '0.1.3'
32
+ __version_tuple__ = version_tuple = (0, 1, 3)
33
+
34
+ __commit_id__ = commit_id = None
@@ -3,14 +3,14 @@ import traceback
3
3
 
4
4
  from aiida import orm
5
5
  from aiida_pythonjob.data.serializer import general_serializer
6
- from aiida_workgraph import WorkGraph, task
6
+ from aiida_workgraph import WorkGraph, task, Task, namespace
7
7
  from aiida_workgraph.socket import TaskSocketNamespace
8
-
8
+ from dataclasses import replace
9
+ from node_graph.node_spec import SchemaSource
9
10
  from python_workflow_definition.models import PythonWorkflowDefinitionWorkflow
10
11
  from python_workflow_definition.shared import (
11
12
  convert_nodes_list_to_dict,
12
13
  update_node_names,
13
- remove_result,
14
14
  set_result_node,
15
15
  NODES_LABEL,
16
16
  EDGES_LABEL,
@@ -24,11 +24,8 @@ from python_workflow_definition.shared import (
24
24
 
25
25
 
26
26
  def load_workflow_json(file_name: str) -> WorkGraph:
27
- data = remove_result(
28
- workflow_dict=PythonWorkflowDefinitionWorkflow.load_json_file(
29
- file_name=file_name
30
- )
31
- )
27
+
28
+ data = PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name)
32
29
 
33
30
  wg = WorkGraph()
34
31
  task_name_mapping = {}
@@ -40,10 +37,10 @@ def load_workflow_json(file_name: str) -> WorkGraph:
40
37
  p, m = identifier.rsplit(".", 1)
41
38
  mod = import_module(p)
42
39
  func = getattr(mod, m)
43
- wg.add_task(func)
44
- # Remove the default result output, because we will add the outputs later from the data in the link
45
- del wg.tasks[-1].outputs["result"]
46
- task_name_mapping[id] = wg.tasks[-1]
40
+ decorated_func = task(outputs=namespace())(func)
41
+ new_task = wg.add_task(decorated_func)
42
+ new_task.spec = replace(new_task.spec, schema_source=SchemaSource.EMBEDDED)
43
+ task_name_mapping[id] = new_task
47
44
  else:
48
45
  # data task
49
46
  data_node = general_serializer(identifier)
@@ -51,13 +48,17 @@ def load_workflow_json(file_name: str) -> WorkGraph:
51
48
 
52
49
  # add links
53
50
  for link in data[EDGES_LABEL]:
51
+ # TODO: continue here
54
52
  to_task = task_name_mapping[str(link[TARGET_LABEL])]
55
53
  # if the input does not exist, it means we pass the data into the kwargs
56
54
  # in this case, we add the input socket
57
- if link[TARGET_PORT_LABEL] not in to_task.inputs:
58
- to_socket = to_task.add_input("workgraph.any", name=link[TARGET_PORT_LABEL])
59
- else:
60
- to_socket = to_task.inputs[link[TARGET_PORT_LABEL]]
55
+ if isinstance(to_task, Task):
56
+ if link[TARGET_PORT_LABEL] not in to_task.inputs:
57
+ to_socket = to_task.add_input_spec(
58
+ "workgraph.any", name=link[TARGET_PORT_LABEL]
59
+ )
60
+ else:
61
+ to_socket = to_task.inputs[link[TARGET_PORT_LABEL]]
61
62
  from_task = task_name_mapping[str(link[SOURCE_LABEL])]
62
63
  if isinstance(from_task, orm.Data):
63
64
  to_socket.value = from_task
@@ -69,16 +70,14 @@ def load_workflow_json(file_name: str) -> WorkGraph:
69
70
  # we add it here, and assume the output exists
70
71
  if link[SOURCE_PORT_LABEL] not in from_task.outputs:
71
72
  # if str(link["sourcePort"]) not in from_task.outputs:
72
- from_socket = from_task.add_output(
73
+ from_socket = from_task.add_output_spec(
73
74
  "workgraph.any",
74
75
  name=link[SOURCE_PORT_LABEL],
75
- # name=str(link["sourcePort"]),
76
- metadata={"is_function_output": True},
77
76
  )
78
77
  else:
79
78
  from_socket = from_task.outputs[link[SOURCE_PORT_LABEL]]
80
-
81
- wg.add_link(from_socket, to_socket)
79
+ if isinstance(to_task, Task):
80
+ wg.add_link(from_socket, to_socket)
82
81
  except Exception as e:
83
82
  traceback.print_exc()
84
83
  print("Failed to link", link, "with error:", e)
@@ -90,12 +89,18 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict:
90
89
  node_name_mapping = {}
91
90
  data_node_name_mapping = {}
92
91
  i = 0
92
+ GRAPH_LEVEL_NAMES = ["graph_inputs", "graph_outputs", "graph_ctx"]
93
+
93
94
  for node in wg.tasks:
94
- executor = node.get_executor()
95
+
96
+ if node.name in GRAPH_LEVEL_NAMES:
97
+ continue
98
+
95
99
  node_name_mapping[node.name] = i
96
100
 
97
- callable_name = executor["callable_name"]
98
- callable_name = f"{executor['module_path']}.{callable_name}"
101
+ executor = node.get_executor()
102
+ callable_name = f"{executor.module_path}.{executor.callable_name}"
103
+
99
104
  data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name})
100
105
  i += 1
101
106
 
@@ -141,6 +146,7 @@ def write_workflow_json(wg: WorkGraph, file_name: str) -> dict:
141
146
  SOURCE_PORT_LABEL: None,
142
147
  }
143
148
  )
149
+
144
150
  data[VERSION_LABEL] = VERSION_NUMBER
145
151
  PythonWorkflowDefinitionWorkflow(
146
152
  **set_result_node(workflow_dict=update_node_names(workflow_dict=data))
@@ -34,7 +34,7 @@ def _get_value(result_dict: dict, nodes_new_dict: dict, link_dict: dict, exe: Ex
34
34
  if source_handle is None:
35
35
  return result
36
36
  else:
37
- return exe.submit(fn=get_item, obj=result, key=source_handle)
37
+ return exe.submit(get_item, obj=result, key=source_handle)
38
38
 
39
39
 
40
40
  def load_workflow_json(file_name: str, exe: Executor):
@@ -1,5 +1,15 @@
1
1
  from pathlib import Path
2
- from typing import List, Union, Optional, Literal, Any, Annotated, Type, TypeVar
2
+ from typing import (
3
+ List,
4
+ Union,
5
+ Optional,
6
+ Literal,
7
+ Any,
8
+ Annotated,
9
+ Type,
10
+ TypeVar,
11
+ )
12
+ from typing_extensions import TypeAliasType
3
13
  from pydantic import BaseModel, Field, field_validator, field_serializer
4
14
  from pydantic import ValidationError
5
15
  import json
@@ -19,6 +29,13 @@ __all__ = (
19
29
  )
20
30
 
21
31
 
32
+ JsonPrimitive = Union[str, int, float, bool, None]
33
+ AllowableDefaults = TypeAliasType(
34
+ "AllowableDefaults",
35
+ "Union[JsonPrimitive, dict[str, AllowableDefaults], list[AllowableDefaults]]",
36
+ )
37
+
38
+
22
39
  class PythonWorkflowDefinitionBaseNode(BaseModel):
23
40
  """Base model for all node types, containing common fields."""
24
41
 
@@ -33,7 +50,7 @@ class PythonWorkflowDefinitionInputNode(PythonWorkflowDefinitionBaseNode):
33
50
 
34
51
  type: Literal["input"]
35
52
  name: str
36
- value: Optional[Any] = None
53
+ value: Optional[AllowableDefaults] = None
37
54
 
38
55
 
39
56
  class PythonWorkflowDefinitionOutputNode(PythonWorkflowDefinitionBaseNode):
@@ -98,11 +98,11 @@ def get_dict(**kwargs) -> dict:
98
98
 
99
99
 
100
100
  def get_list(**kwargs) -> list:
101
- return list(kwargs["kwargs"].values())
101
+ return list(kwargs["kwargs"])
102
102
 
103
103
 
104
104
  def _remove_server_obj(nodes_dict: dict, edges_lst: list):
105
- server_lst = [k for k in nodes_dict.keys() if k.startswith("server_obj_")]
105
+ server_lst = [k for k in nodes_dict.keys() if k.startswith("_server_obj_")]
106
106
  for s in server_lst:
107
107
  del nodes_dict[s]
108
108
  edges_lst = [ep for ep in edges_lst if s not in ep]
@@ -285,18 +285,32 @@ def write_workflow_json(
285
285
  )
286
286
 
287
287
  nodes_store_lst = []
288
- for k, v in nodes_new_dict.items():
288
+ translate_dict = {}
289
+ for i, k in enumerate(list(nodes_new_dict.keys())[::-1]):
290
+ v = nodes_new_dict[k]
291
+ translate_dict[k] = i
289
292
  if isfunction(v):
290
293
  mod = v.__module__
291
294
  if mod == "python_workflow_definition.pyiron_base":
292
295
  mod = "python_workflow_definition.shared"
293
296
  nodes_store_lst.append(
294
- {"id": k, "type": "function", "value": mod + "." + v.__name__}
297
+ {"id": i, "type": "function", "value": mod + "." + v.__name__}
295
298
  )
296
299
  elif isinstance(v, np.ndarray):
297
- nodes_store_lst.append({"id": k, "type": "input", "value": v.tolist()})
300
+ nodes_store_lst.append({"id": i, "type": "input", "value": v.tolist()})
298
301
  else:
299
- nodes_store_lst.append({"id": k, "type": "input", "value": v})
302
+ nodes_store_lst.append({"id": i, "type": "input", "value": v})
303
+
304
+ print(translate_dict)
305
+ edges_store_lst = [
306
+ {
307
+ TARGET_LABEL: translate_dict[edge[TARGET_LABEL]],
308
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
309
+ SOURCE_LABEL: translate_dict[edge[SOURCE_LABEL]],
310
+ SOURCE_PORT_LABEL: edge[SOURCE_PORT_LABEL],
311
+ }
312
+ for edge in edges_new_lst
313
+ ]
300
314
 
301
315
  PythonWorkflowDefinitionWorkflow(
302
316
  **set_result_node(
@@ -304,7 +318,7 @@ def write_workflow_json(
304
318
  workflow_dict={
305
319
  VERSION_LABEL: VERSION_NUMBER,
306
320
  NODES_LABEL: nodes_store_lst,
307
- EDGES_LABEL: edges_new_lst,
321
+ EDGES_LABEL: edges_store_lst,
308
322
  }
309
323
  )
310
324
  )
@@ -0,0 +1,407 @@
1
+ from collections import Counter
2
+ from inspect import isfunction
3
+ from importlib import import_module
4
+ from typing import Any
5
+
6
+ import numpy as np
7
+ from pyiron_workflow import as_function_node, function_node, Workflow
8
+ from pyiron_workflow.api import Function
9
+
10
+ from python_workflow_definition.models import PythonWorkflowDefinitionWorkflow
11
+ from python_workflow_definition.shared import (
12
+ get_dict,
13
+ update_node_names,
14
+ set_result_node,
15
+ remove_result,
16
+ NODES_LABEL,
17
+ EDGES_LABEL,
18
+ SOURCE_LABEL,
19
+ SOURCE_PORT_LABEL,
20
+ TARGET_LABEL,
21
+ TARGET_PORT_LABEL,
22
+ VERSION_NUMBER,
23
+ VERSION_LABEL,
24
+ )
25
+
26
+
27
+ def get_linked_nodes(graph_dict):
28
+ nodes_dict = {}
29
+ node_mapping_dict = {}
30
+ input_dict = {}
31
+ for i, [k, v] in enumerate(graph_dict["nodes"].items()):
32
+ if "inputs_to_dict_factory" in str(type(v)):
33
+ nodes_dict[i] = get_dict
34
+ else:
35
+ nodes_dict[i] = v.node_function
36
+ node_mapping_dict[k] = i
37
+ input_dict[k] = {
38
+ con.full_label: con.value
39
+ for con in v.inputs.channel_dict.to_list()
40
+ if len(con.connections) == 0
41
+ }
42
+ return nodes_dict, node_mapping_dict, input_dict
43
+
44
+
45
+ def extend_nodes_dict(nodes_dict, input_dict):
46
+ i = len(nodes_dict)
47
+ nodes_links_dict = {}
48
+ nodes_values_str_lst = [str(s) for s in nodes_dict.values()]
49
+ for val_dict in input_dict.values():
50
+ for k, v in val_dict.items():
51
+ if str(v) not in nodes_values_str_lst:
52
+ nodes_dict[i] = v
53
+ nodes_links_dict[k] = i
54
+ i += 1
55
+ else:
56
+ nodes_links_dict[k] = {tv: tk for tk, tv in nodes_dict.items()}[v]
57
+ return nodes_links_dict
58
+
59
+
60
+ def get_edges(graph_dict, node_mapping_dict, nodes_links_dict):
61
+ edges_lst = []
62
+ for link in list(graph_dict["edges"]["data"].keys()):
63
+ source_combo, target_combo = link
64
+ target, target_handle = target_combo.split(".")
65
+ source, source_handle = source_combo.split(".")
66
+ edges_lst.append(
67
+ {
68
+ TARGET_LABEL: node_mapping_dict[target],
69
+ TARGET_PORT_LABEL: target_handle,
70
+ SOURCE_LABEL: node_mapping_dict[source],
71
+ SOURCE_PORT_LABEL: source_handle,
72
+ }
73
+ )
74
+
75
+ for k, v in nodes_links_dict.items():
76
+ target, target_handle = k.split(".")
77
+ edges_lst.append(
78
+ {
79
+ TARGET_LABEL: node_mapping_dict[target],
80
+ TARGET_PORT_LABEL: target_handle,
81
+ SOURCE_LABEL: v,
82
+ SOURCE_PORT_LABEL: None,
83
+ }
84
+ )
85
+ return edges_lst
86
+
87
+
88
+ def write_workflow_json(graph_as_dict: dict, file_name: str = "workflow.json"):
89
+ nodes_dict, node_mapping_dict, input_dict = get_linked_nodes(
90
+ graph_dict=graph_as_dict
91
+ )
92
+ nodes_links_dict = extend_nodes_dict(nodes_dict=nodes_dict, input_dict=input_dict)
93
+ edges_lst = get_edges(
94
+ graph_dict=graph_as_dict,
95
+ node_mapping_dict=node_mapping_dict,
96
+ nodes_links_dict=nodes_links_dict,
97
+ )
98
+
99
+ pyiron_workflow_modules = {}
100
+ for k, v in nodes_dict.items():
101
+ if isfunction(v) and "pyiron_workflow" in v.__module__:
102
+ pyiron_workflow_modules[k] = v
103
+
104
+ cache_mapping_dict, remap_dict = {}, {}
105
+ for k, v in nodes_dict.items():
106
+ if not isfunction(v) and str(v) not in cache_mapping_dict:
107
+ cache_mapping_dict[str(v)] = k
108
+ elif not isfunction(v):
109
+ remap_dict[k] = cache_mapping_dict[str(v)]
110
+
111
+ item_node_lst = [
112
+ e[SOURCE_LABEL]
113
+ for e in edges_lst
114
+ if e[TARGET_LABEL] in pyiron_workflow_modules.keys()
115
+ and e[TARGET_PORT_LABEL] == "item"
116
+ ]
117
+
118
+ values_from_dict_lst = [
119
+ k
120
+ for k, v in nodes_dict.items()
121
+ if isfunction(v) and v.__name__ == "get_values_from_dict"
122
+ ]
123
+
124
+ remap_get_list_dict = {}
125
+ for e in edges_lst:
126
+ if e[TARGET_LABEL] in values_from_dict_lst:
127
+ remap_get_list_dict[e[SOURCE_LABEL]] = e[TARGET_LABEL]
128
+
129
+ nodes_remaining_dict = {
130
+ k: v
131
+ for k, v in nodes_dict.items()
132
+ if k not in pyiron_workflow_modules.keys()
133
+ and k not in remap_dict.keys()
134
+ and k not in item_node_lst
135
+ and k not in remap_get_list_dict.values()
136
+ }
137
+
138
+ nodes_store_lst = []
139
+ nodes_final_order_dict = {}
140
+ for k, [i, v] in enumerate(nodes_remaining_dict.items()):
141
+ if i in remap_get_list_dict:
142
+ nodes_store_lst.append(
143
+ {
144
+ "id": k,
145
+ "type": "function",
146
+ "value": "python_workflow_definition.shared.get_list",
147
+ }
148
+ )
149
+ elif isfunction(v):
150
+ mod = v.__module__
151
+ if mod == "python_workflow_definition.pyiron_workflow":
152
+ mod = "python_workflow_definition.shared"
153
+ nodes_store_lst.append(
154
+ {"id": k, "type": "function", "value": mod + "." + v.__name__}
155
+ )
156
+ elif isinstance(v, np.ndarray):
157
+ nodes_store_lst.append({"id": k, "type": "input", "value": v.tolist()})
158
+ else:
159
+ nodes_store_lst.append({"id": k, "type": "input", "value": v})
160
+ nodes_final_order_dict[i] = k
161
+
162
+ remap_get_list_remove_edges = [
163
+ edge for edge in edges_lst if edge[TARGET_LABEL] in remap_get_list_dict.values()
164
+ ]
165
+
166
+ edge_get_list_updated_lst = []
167
+ for edge in edges_lst:
168
+ if edge[SOURCE_LABEL] in remap_get_list_dict.values():
169
+ connected_edge = [
170
+ edge_con
171
+ for edge_con in remap_get_list_remove_edges
172
+ if edge_con[TARGET_LABEL] == edge[SOURCE_LABEL]
173
+ ][-1]
174
+ edge_updated = {
175
+ TARGET_LABEL: edge[TARGET_LABEL],
176
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
177
+ SOURCE_LABEL: connected_edge[SOURCE_LABEL],
178
+ SOURCE_PORT_LABEL: connected_edge[SOURCE_PORT_LABEL],
179
+ }
180
+ edge_get_list_updated_lst.append(edge_updated)
181
+ elif edge[SOURCE_LABEL] in remap_dict.keys():
182
+ edge_updated = {
183
+ TARGET_LABEL: edge[TARGET_LABEL],
184
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
185
+ SOURCE_LABEL: remap_dict[edge[SOURCE_LABEL]],
186
+ SOURCE_PORT_LABEL: edge[SOURCE_PORT_LABEL],
187
+ }
188
+ edge_get_list_updated_lst.append(edge_updated)
189
+ elif edge[TARGET_LABEL] not in remap_get_list_dict.values():
190
+ edge_get_list_updated_lst.append(edge)
191
+
192
+ target_dict = {}
193
+ for edge in edge_get_list_updated_lst:
194
+ for k in pyiron_workflow_modules.keys():
195
+ if k == edge[TARGET_LABEL]:
196
+ if k not in target_dict:
197
+ target_dict[k] = []
198
+ target_dict[k].append(edge)
199
+
200
+ source_dict = {}
201
+ for edge in edge_get_list_updated_lst:
202
+ for k in pyiron_workflow_modules.keys():
203
+ if k == edge[SOURCE_LABEL]:
204
+ if k not in source_dict:
205
+ source_dict[k] = []
206
+ source_dict[k].append(edge)
207
+
208
+ edge_new_lst, nodes_to_delete = [], []
209
+ for k in target_dict.keys():
210
+ source, sourcehandle = None, None
211
+ for edge in target_dict[k]:
212
+ if edge[SOURCE_PORT_LABEL] is None:
213
+ sourcehandle = nodes_dict[edge[SOURCE_LABEL]]
214
+ nodes_to_delete.append(edge[SOURCE_LABEL])
215
+ else:
216
+ source = edge[SOURCE_LABEL]
217
+ if "s_" == source_dict[k][-1][TARGET_PORT_LABEL][:2]:
218
+ edge_new_lst.append(
219
+ {
220
+ SOURCE_LABEL: nodes_final_order_dict[source],
221
+ SOURCE_PORT_LABEL: sourcehandle,
222
+ TARGET_LABEL: nodes_final_order_dict[
223
+ source_dict[k][-1][TARGET_LABEL]
224
+ ],
225
+ TARGET_PORT_LABEL: source_dict[k][-1][TARGET_PORT_LABEL][2:],
226
+ }
227
+ )
228
+ else:
229
+ edge_new_lst.append(
230
+ {
231
+ SOURCE_LABEL: nodes_final_order_dict[source],
232
+ SOURCE_PORT_LABEL: sourcehandle,
233
+ TARGET_LABEL: nodes_final_order_dict[
234
+ source_dict[k][-1][TARGET_LABEL]
235
+ ],
236
+ TARGET_PORT_LABEL: source_dict[k][-1][TARGET_PORT_LABEL],
237
+ }
238
+ )
239
+
240
+ nodes_to_skip = nodes_to_delete + list(pyiron_workflow_modules.keys())
241
+ for edge in edge_get_list_updated_lst:
242
+ if (
243
+ edge[TARGET_LABEL] not in nodes_to_skip
244
+ and edge[SOURCE_LABEL] not in nodes_to_skip
245
+ ):
246
+ source_node = nodes_remaining_dict[edge[SOURCE_LABEL]]
247
+ if (
248
+ isfunction(source_node)
249
+ and source_node.__name__ == edge[SOURCE_PORT_LABEL]
250
+ ):
251
+ edge_new_lst.append(
252
+ {
253
+ TARGET_LABEL: nodes_final_order_dict[edge[TARGET_LABEL]],
254
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
255
+ SOURCE_LABEL: nodes_final_order_dict[edge[SOURCE_LABEL]],
256
+ SOURCE_PORT_LABEL: None,
257
+ }
258
+ )
259
+ elif (
260
+ isfunction(source_node)
261
+ and source_node.__name__ == "get_dict"
262
+ and edge[SOURCE_PORT_LABEL] == "dict"
263
+ ):
264
+ edge_new_lst.append(
265
+ {
266
+ TARGET_LABEL: nodes_final_order_dict[edge[TARGET_LABEL]],
267
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
268
+ SOURCE_LABEL: nodes_final_order_dict[edge[SOURCE_LABEL]],
269
+ SOURCE_PORT_LABEL: None,
270
+ }
271
+ )
272
+ else:
273
+ edge_new_lst.append(
274
+ {
275
+ TARGET_LABEL: nodes_final_order_dict[edge[TARGET_LABEL]],
276
+ TARGET_PORT_LABEL: edge[TARGET_PORT_LABEL],
277
+ SOURCE_LABEL: nodes_final_order_dict[edge[SOURCE_LABEL]],
278
+ SOURCE_PORT_LABEL: edge[SOURCE_PORT_LABEL],
279
+ }
280
+ )
281
+
282
+ PythonWorkflowDefinitionWorkflow(
283
+ **set_result_node(
284
+ workflow_dict=update_node_names(
285
+ workflow_dict={
286
+ VERSION_LABEL: VERSION_NUMBER,
287
+ NODES_LABEL: nodes_store_lst,
288
+ EDGES_LABEL: edge_new_lst,
289
+ }
290
+ )
291
+ )
292
+ ).dump_json_file(file_name=file_name, indent=2)
293
+
294
+
295
+ def import_from_string(library_path: str) -> Any:
296
+ # Copied from bagofholding
297
+ split_path = library_path.split(".", 1)
298
+ if len(split_path) == 1:
299
+ module_name, path = split_path[0], ""
300
+ else:
301
+ module_name, path = split_path
302
+ obj = import_module(module_name)
303
+ for k in path.split("."):
304
+ obj = getattr(obj, k)
305
+ return obj
306
+
307
+
308
+ def generate_get_dict_function(args_of_lst):
309
+ lines = "def get_dict(" + ", ".join(args_of_lst) + "):\n"
310
+ lines += " return {\n"
311
+ for parameter in args_of_lst:
312
+ lines += ' "' + parameter + '": ' + parameter + ",\n"
313
+ lines += " }"
314
+ return lines
315
+
316
+
317
+ def generate_get_list_function(args_of_lst):
318
+ lines = "def get_list(" + ", ".join(args_of_lst) + "):\n"
319
+ lines += " return [\n"
320
+ for parameter in args_of_lst:
321
+ lines += " " + parameter + ",\n"
322
+ lines += " ]"
323
+ return lines
324
+
325
+
326
+ def load_workflow_json(file_name: str) -> Workflow:
327
+ content = remove_result(
328
+ PythonWorkflowDefinitionWorkflow.load_json_file(file_name=file_name)
329
+ )
330
+
331
+ input_values: dict[int, object] = (
332
+ {}
333
+ ) # Type is actually more restrictive, must be jsonifyable object
334
+ nodes: dict[int, Function] = {}
335
+ total_counter_dict = Counter(
336
+ [n["value"] for n in content[NODES_LABEL] if n["type"] == "function"]
337
+ )
338
+ counter_dict = {k: -1 for k in total_counter_dict.keys()}
339
+ wf = Workflow(file_name.split(".")[0])
340
+ nodes_look_up_dict = {node["id"]: node["value"] for node in content[NODES_LABEL]}
341
+ for node_dict in content[NODES_LABEL]:
342
+ if node_dict["type"] == "function":
343
+ if node_dict["value"] == "python_workflow_definition.shared.get_dict":
344
+ exec(
345
+ generate_get_dict_function(
346
+ args_of_lst=[
347
+ edge[TARGET_PORT_LABEL]
348
+ for edge in content[EDGES_LABEL]
349
+ if edge[TARGET_LABEL] == node_dict["id"]
350
+ ]
351
+ )
352
+ )
353
+ fnc = eval("get_dict")
354
+ elif node_dict["value"] == "python_workflow_definition.shared.get_list":
355
+ exec(
356
+ generate_get_list_function(
357
+ args_of_lst=[
358
+ "s_" + edge[TARGET_PORT_LABEL]
359
+ for edge in content[EDGES_LABEL]
360
+ if edge[TARGET_LABEL] == node_dict["id"]
361
+ ]
362
+ )
363
+ )
364
+ fnc = eval("get_list")
365
+ else:
366
+ fnc = import_from_string(node_dict["value"])
367
+ if total_counter_dict[node_dict["value"]] > 1:
368
+ counter_dict[node_dict["value"]] += 1
369
+ name = fnc.__name__ + "_" + str(counter_dict[node_dict["value"]])
370
+ else:
371
+ name = fnc.__name__
372
+ n = function_node(
373
+ fnc, output_labels=name, validate_output_labels=False
374
+ ) # Strictly force single-output
375
+ nodes[node_dict["id"]] = n
376
+ wf.add_child(child=n, label=name)
377
+ elif node_dict["type"] == "input":
378
+ input_values[node_dict["id"]] = node_dict["value"]
379
+
380
+ for edge_dict in content[EDGES_LABEL]:
381
+ target_id = edge_dict[TARGET_LABEL]
382
+ target_port = edge_dict[TARGET_PORT_LABEL]
383
+ source_id = edge_dict[SOURCE_LABEL]
384
+ source_port = edge_dict[SOURCE_PORT_LABEL]
385
+
386
+ if source_port is None:
387
+ if source_id in input_values.keys(): # Parent input value
388
+ upstream = input_values[source_id]
389
+ else: # Single-output sibling
390
+ upstream = nodes[source_id]
391
+ else: # Dictionary-output sibling
392
+ injected_attribute_access = nodes[source_id].__getitem__(source_port)
393
+ upstream = injected_attribute_access
394
+ downstream = nodes[target_id]
395
+ if (
396
+ nodes_look_up_dict[target_id]
397
+ == "python_workflow_definition.shared.get_list"
398
+ ):
399
+ setattr(downstream.inputs, "s_" + target_port, upstream)
400
+ else:
401
+ setattr(
402
+ downstream.inputs, target_port, upstream
403
+ ) # Exploit input panel magic
404
+ # Warning: edge setup routine is bespoke for an environment where all nodes return
405
+ # a single value or a dictionary
406
+
407
+ return wf
@@ -1,31 +0,0 @@
1
- [build-system]
2
- requires = ["hatchling"]
3
- build-backend = "hatchling.build"
4
-
5
- [project]
6
- name = "python_workflow_definition"
7
- version = "0.1.0"
8
- description = "Python Workflow Definition - workflow interoperability for aiida, jobflow and pyiron"
9
- authors = [
10
- { name = "Jan Janssen", email = "janssen@mpie.de" },
11
- { name = "Janine George", email = "janine.geogre@bam.de" },
12
- { name = "Julian Geiger", email = "julian.geiger@psi.ch" },
13
- { name = "Xing Wang", email = "xing.wang@psi.ch" },
14
- { name = "Marnik Bercx", email = "marnik.bercx@psi.ch" },
15
- { name = "Christina Ertural", email = "christina.ertural@bam.de" },
16
- ]
17
- license = { file = "../LICENSE" }
18
- dependencies = [
19
- "aiida-workgraph>=0.5.1,<=0.5.2",
20
- "numpy>=1.21,<2",
21
- "jobflow>=0.1.18,<=0.1.19",
22
- "pyiron_base>=0.11.10,<=0.11.11",
23
- "pydantic>=2.7.0,<=2.11.4",
24
- ]
25
-
26
- [project.optional-dependencies]
27
- plot = [
28
- "pygraphviz>=1.10,<=1.14",
29
- "networkx>=2.8.8,<=3.4.2",
30
- "ipython>=7.33.0,<=9.0.2",
31
- ]