lmnr 0.2.5__tar.gz → 0.2.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {lmnr-0.2.5 → lmnr-0.2.7}/PKG-INFO +4 -11
  2. {lmnr-0.2.5 → lmnr-0.2.7}/README.md +3 -9
  3. {lmnr-0.2.5 → lmnr-0.2.7}/pyproject.toml +2 -3
  4. lmnr-0.2.7/src/lmnr/cli/parser/nodes/code.py +27 -0
  5. lmnr-0.2.7/src/lmnr/cli/parser/nodes/condition.py +30 -0
  6. lmnr-0.2.7/src/lmnr/cli/parser/nodes/input.py +26 -0
  7. lmnr-0.2.7/src/lmnr/cli/parser/nodes/llm.py +51 -0
  8. lmnr-0.2.7/src/lmnr/cli/parser/nodes/output.py +27 -0
  9. lmnr-0.2.7/src/lmnr/cli/parser/nodes/router.py +37 -0
  10. lmnr-0.2.7/src/lmnr/cli/parser/nodes/semantic_search.py +81 -0
  11. lmnr-0.2.7/src/lmnr/cli/parser/nodes/types.py +138 -0
  12. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/parser/parser.py +4 -4
  13. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/engine.py +38 -6
  14. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/nodes/functions.py +79 -7
  15. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/sdk/endpoint.py +31 -11
  16. lmnr-0.2.7/src/lmnr/sdk/remote_debugger.py +139 -0
  17. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/types.py +41 -13
  18. lmnr-0.2.5/src/lmnr/cli/parser/nodes/types.py +0 -157
  19. lmnr-0.2.5/src/lmnr/sdk/remote_debugger.py +0 -96
  20. {lmnr-0.2.5 → lmnr-0.2.7}/LICENSE +0 -0
  21. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/__init__.py +0 -0
  22. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/__init__.py +0 -0
  23. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/__main__.py +0 -0
  24. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/cli.py +0 -0
  25. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/cookiecutter.json +0 -0
  26. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/parser/__init__.py +0 -0
  27. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/parser/nodes/__init__.py +0 -0
  28. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/parser/utils.py +0 -0
  29. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/__init__.py +0 -0
  30. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/__init__.py +0 -0
  31. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/action.py +0 -0
  32. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/state.py +0 -0
  33. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/task.py +0 -0
  34. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/__init__.py +0 -0
  35. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/{{cookiecutter.pipeline_dir_name}}.py +0 -0
  36. {lmnr-0.2.5 → lmnr-0.2.7}/src/lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/types.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lmnr
- Version: 0.2.5
+ Version: 0.2.7
  Summary: Python SDK for Laminar AI
  License: Apache-2.0
  Author: lmnr.ai
@@ -17,11 +17,12 @@ Requires-Dist: cookiecutter (>=2.6.0,<3.0.0)
  Requires-Dist: pydantic (>=2.7.4,<3.0.0)
  Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
  Requires-Dist: requests (>=2.32.3,<3.0.0)
- Requires-Dist: urllib3 (==1.26.6)
  Requires-Dist: websockets (>=12.0,<13.0)
  Description-Content-Type: text/markdown

- # Python SDK for Laminar AI
+ # Laminar AI
+
+ This repo provides core for code generation, Laminar CLI, and Laminar SDK.

  ## Quickstart
  ```sh
@@ -139,14 +140,6 @@ Set up `DEBUGGER_SESSION_ID` environment variable in your pipeline.

  You can run as many sessions as you need, experimenting with your flows.

- #### 5. Stop the debugger
-
- In order to stop the session, do
-
- ```python
- debugger.stop()
- ```
-
  ## CLI for code generation

  ### Basic usage
@@ -1,4 +1,6 @@
- # Python SDK for Laminar AI
+ # Laminar AI
+
+ This repo provides core for code generation, Laminar CLI, and Laminar SDK.

  ## Quickstart
  ```sh
@@ -116,14 +118,6 @@ Set up `DEBUGGER_SESSION_ID` environment variable in your pipeline.

  You can run as many sessions as you need, experimenting with your flows.

- #### 5. Stop the debugger
-
- In order to stop the session, do
-
- ```python
- debugger.stop()
- ```
-
  ## CLI for code generation

  ### Basic usage
@@ -1,6 +1,6 @@
  [project]
  name = "lmnr"
- version = "0.2.5"
+ version = "0.2.7"
  description = "Python SDK for Laminar AI"
  authors = [
      { name = "lmnr.ai", email = "founders@lmnr.ai" }
@@ -11,7 +11,7 @@ license = "Apache-2.0"

  [tool.poetry]
  name = "lmnr"
- version = "0.2.5"
+ version = "0.2.7"
  description = "Python SDK for Laminar AI"
  authors = ["lmnr.ai"]
  readme = "README.md"
@@ -23,7 +23,6 @@ black = "^24.4.2"
  pydantic = "^2.7.4"
  click = "^8.1.7"
  requests = "^2.32.3"
- urllib3 = "1.26.6"
  websockets = "^12.0"
  cookiecutter = "^2.6.0"
  python-dotenv = "^1.0.1"
@@ -0,0 +1,27 @@
+ from dataclasses import dataclass
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class CodeNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return map_handles(
+             self.inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "Code"
+
+     def config(self) -> dict:
+         return {}
@@ -0,0 +1,30 @@
+ from dataclasses import dataclass
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class ConditionNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+     condition: str
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return map_handles(
+             self.inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "Condition"
+
+     def config(self) -> dict:
+         return {
+             "condition": self.condition,
+         }
@@ -0,0 +1,26 @@
+ from dataclasses import dataclass
+ from typing import Optional
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, HandleType, NodeFunctions
+ from lmnr.types import NodeInput
+
+
+ @dataclass
+ class InputNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     outputs: list[Handle]
+     input: Optional[NodeInput]
+     input_type: HandleType
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return []
+
+     def node_type(self) -> str:
+         return "Input"
+
+     def config(self) -> dict:
+         return {}
@@ -0,0 +1,51 @@
+ from dataclasses import dataclass
+ from typing import Optional
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class LLMNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     dynamic_inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+     prompt: str
+     model: str
+     model_params: Optional[str]
+     stream: bool
+     structured_output_enabled: bool
+     structured_output_max_retries: int
+     structured_output_schema: Optional[str]
+     structured_output_schema_target: Optional[str]
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         combined_inputs = self.inputs + self.dynamic_inputs
+         return map_handles(
+             combined_inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "LLM"
+
+     def config(self) -> dict:
+         # For easier access in the template separate the provider and model here
+         provider, model = self.model.split(":", maxsplit=1)
+
+         return {
+             "prompt": self.prompt,
+             "provider": provider,
+             "model": model,
+             "model_params": self.model_params,
+             "stream": self.stream,
+             "structured_output_enabled": self.structured_output_enabled,
+             "structured_output_max_retries": self.structured_output_max_retries,
+             "structured_output_schema": self.structured_output_schema,
+             "structured_output_schema_target": self.structured_output_schema_target,
+         }
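
The `config()` in the new `llm.py` assumes the stored model identifier has the form `provider:model`. A minimal sketch of that split, using an illustrative model string that is not taken from this diff:

```python
# Illustrative only: mirrors the split(":", maxsplit=1) used by LLMNode.config().
model = "openai:gpt-4o-mini"  # assumed "provider:model" format
provider, model_name = model.split(":", maxsplit=1)
print(provider, model_name)  # openai gpt-4o-mini
```

Because `maxsplit=1`, any further colons stay inside the model name.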
@@ -0,0 +1,27 @@
+ from dataclasses import dataclass
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class OutputNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return map_handles(
+             self.inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "Output"
+
+     def config(self) -> dict:
+         return {}
@@ -0,0 +1,37 @@
+ from dataclasses import dataclass
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class Route:
+     name: str
+
+
+ @dataclass
+ class RouterNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+     routes: list[Route]
+     has_default_route: bool
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return map_handles(
+             self.inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "Router"
+
+     def config(self) -> dict:
+         return {
+             "routes": str([route.name for route in self.routes]),
+             "has_default_route": str(self.has_default_route),
+         }
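
`RouterNode.config()` in the new `router.py` hands its routes and default-route flag to the code template as plain strings. A tiny sketch of the resulting values, using made-up route names:

```python
# Hypothetical routes; shows the stringified values RouterNode.config() emits.
route_names = ["billing", "support"]
config = {
    "routes": str(route_names),       # "['billing', 'support']"
    "has_default_route": str(False),  # "False"
}
print(config)
```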
@@ -0,0 +1,81 @@
+ from dataclasses import dataclass
+ from datetime import datetime
+
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle, NodeFunctions
+ from lmnr.cli.parser.utils import map_handles
+
+
+ @dataclass
+ class FileMetadata:
+     id: uuid.UUID
+     created_at: datetime
+     project_id: uuid.UUID
+     filename: str
+
+
+ @dataclass
+ class Dataset:
+     id: uuid.UUID
+     created_at: datetime
+     project_id: uuid.UUID
+     name: str
+
+
+ @dataclass
+ class SemanticSearchDatasource:
+     type: str
+     id: uuid.UUID
+     # TODO: Paste other fields here, use Union[FileMetadata, Dataset]
+
+     @classmethod
+     def from_dict(cls, datasource_dict: dict) -> "SemanticSearchDatasource":
+         if datasource_dict["type"] == "File":
+             return cls(
+                 type="File",
+                 id=uuid.UUID(datasource_dict["id"]),
+             )
+         elif datasource_dict["type"] == "Dataset":
+             return cls(
+                 type="Dataset",
+                 id=uuid.UUID(datasource_dict["id"]),
+             )
+         else:
+             raise ValueError(
+                 f"Invalid SemanticSearchDatasource type: {datasource_dict['type']}"
+             )
+
+
+ @dataclass
+ class SemanticSearchNode(NodeFunctions):
+     id: uuid.UUID
+     name: str
+     inputs: list[Handle]
+     outputs: list[Handle]
+     inputs_mappings: dict[uuid.UUID, uuid.UUID]
+     limit: int
+     threshold: float
+     template: str
+     datasources: list[SemanticSearchDatasource]
+
+     def handles_mapping(
+         self, output_handle_id_to_node_name: dict[str, str]
+     ) -> list[tuple[str, str]]:
+         return map_handles(
+             self.inputs, self.inputs_mappings, output_handle_id_to_node_name
+         )
+
+     def node_type(self) -> str:
+         return "SemanticSearch"
+
+     def config(self) -> dict:
+         return {
+             "limit": self.limit,
+             "threshold": self.threshold,
+             "template": self.template,
+             "datasource_ids": [str(datasource.id) for datasource in self.datasources],
+             "datasource_ids_list": str(
+                 [str(datasource.id) for datasource in self.datasources]
+             ),
+         }
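
`SemanticSearchDatasource.from_dict` only accepts the two datasource kinds shown above; anything else raises `ValueError`. A small usage sketch, assuming lmnr 0.2.7 is installed and using made-up UUIDs:

```python
from lmnr.cli.parser.nodes.semantic_search import SemanticSearchDatasource

# Payload keys mirror the "File"/"Dataset" branches above; the ids are illustrative.
file_ds = SemanticSearchDatasource.from_dict(
    {"type": "File", "id": "8b6bcf06-7e36-4b3f-9f3d-1a2b3c4d5e6f"}
)
dataset_ds = SemanticSearchDatasource.from_dict(
    {"type": "Dataset", "id": "0f1e2d3c-4b5a-6978-8796-a5b4c3d2e1f0"}
)
print(file_ds.type, dataset_ds.type)  # File Dataset
```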
@@ -0,0 +1,138 @@
+ from typing import Any, Union
+ import uuid
+
+ from lmnr.cli.parser.nodes import Handle
+ from lmnr.cli.parser.nodes.code import CodeNode
+ from lmnr.cli.parser.nodes.condition import ConditionNode
+ from lmnr.cli.parser.nodes.input import InputNode
+ from lmnr.cli.parser.nodes.llm import LLMNode
+ from lmnr.cli.parser.nodes.output import OutputNode
+ from lmnr.cli.parser.nodes.router import Route, RouterNode
+ from lmnr.cli.parser.nodes.semantic_search import (
+     SemanticSearchDatasource,
+     SemanticSearchNode,
+ )
+ from lmnr.types import NodeInput, ChatMessage
+
+
+ def node_input_from_json(json_val: Any) -> NodeInput:
+     if isinstance(json_val, str):
+         return json_val
+     elif isinstance(json_val, list):
+         return [ChatMessage.model_validate(msg) for msg in json_val]
+     else:
+         raise ValueError(f"Invalid NodeInput value: {json_val}")
+
+
+ Node = Union[
+     InputNode,
+     OutputNode,
+     ConditionNode,
+     LLMNode,
+     RouterNode,
+     SemanticSearchNode,
+     CodeNode,
+ ]
+
+
+ def node_from_dict(node_dict: dict) -> Node:
+     if node_dict["type"] == "Input":
+         return InputNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             input=node_input_from_json(node_dict["input"]),
+             input_type=node_dict["inputType"],
+         )
+     elif node_dict["type"] == "Output":
+         return OutputNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+         )
+     elif node_dict["type"] == "Condition":
+         return ConditionNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+             condition=node_dict["condition"],
+         )
+     elif node_dict["type"] == "LLM":
+         return LLMNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             dynamic_inputs=[
+                 Handle.from_dict(handle) for handle in node_dict["dynamicInputs"]
+             ],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+             prompt=node_dict["prompt"],
+             model=node_dict["model"],
+             model_params=(
+                 node_dict["modelParams"] if "modelParams" in node_dict else None
+             ),
+             stream=False,
+             # TODO: Implement structured output
+             structured_output_enabled=False,
+             structured_output_max_retries=3,
+             structured_output_schema=None,
+             structured_output_schema_target=None,
+         )
+     elif node_dict["type"] == "Router":
+         return RouterNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+             routes=[Route(name=route["name"]) for route in node_dict["routes"]],
+             has_default_route=node_dict["hasDefaultRoute"],
+         )
+     elif node_dict["type"] == "SemanticSearch":
+         return SemanticSearchNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+             limit=node_dict["limit"],
+             threshold=node_dict["threshold"],
+             template=node_dict["template"],
+             datasources=[
+                 SemanticSearchDatasource.from_dict(ds)
+                 for ds in node_dict["datasources"]
+             ],
+         )
+     elif node_dict["type"] == "Code":
+         return CodeNode(
+             id=uuid.UUID(node_dict["id"]),
+             name=node_dict["name"],
+             inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
+             outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
+             inputs_mappings={
+                 uuid.UUID(k): uuid.UUID(v)
+                 for k, v in node_dict["inputsMappings"].items()
+             },
+         )
+     else:
+         raise ValueError(f"Node type {node_dict['type']} not supported")
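
A hedged sketch of feeding `node_from_dict` a minimal graph node, assuming lmnr 0.2.7 is installed. The keys mirror the `"Input"` branch above; the empty `outputs` list sidesteps `Handle.from_dict`, whose wire format is not part of this diff, and `"String"` is an assumed `HandleType` value:

```python
from lmnr.cli.parser.nodes.types import node_from_dict

# Minimal "Input" node; the id is illustrative.
node = node_from_dict(
    {
        "type": "Input",
        "id": "6a1c9d2e-0f3b-4c5d-8e7f-1234567890ab",
        "name": "user_query",
        "outputs": [],
        "input": "What is Laminar?",
        "inputType": "String",
    }
)
print(node.node_type())  # Input
```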
@@ -17,6 +17,9 @@ def runnable_graph_to_template_vars(graph: dict) -> dict:
  node = node_from_dict(node_obj)
  handles_mapping = node.handles_mapping(output_handle_id_to_node_name)
  node_type = node.node_type()
+
+ unique_handles = set([handle_name for (handle_name, _) in handles_mapping])
+
  tasks.append(
      {
          "name": node.name,
@@ -28,10 +31,7 @@ def runnable_graph_to_template_vars(graph: dict) -> dict:
          handle_name for (handle_name, _) in handles_mapping
      ],
      "handle_args": ", ".join(
-         [
-             f"{handle_name}: NodeInput"
-             for (handle_name, _) in handles_mapping
-         ]
+         [f"{handle_name}: NodeInput" for handle_name in unique_handles]
      ),
      "prev": [],
      "next": [],
@@ -9,7 +9,8 @@ import queue
  from .task import Task
  from .action import NodeRunError, RunOutput
  from .state import State
- from lmnr_engine.types import Message, NodeInput
+ from lmnr.types import NodeInput
+ from lmnr_engine.types import Message


  logger = logging.getLogger(__name__)
@@ -169,13 +170,17 @@ class Engine:
  active_tasks.remove(task_id)

  if depth > 0:
-     # propagate reset once we enter the loop
-     # TODO: Implement this for cycles
-     raise NotImplementedError()
+     self.propagate_reset(task_id, task_id, tasks)

+ # terminate graph on recursion depth exceeding 10
  if depth == 10:
-     # TODO: Implement this for cycles
-     raise NotImplementedError()
+     logging.error("Max recursion depth exceeded, terminating graph")
+     error = Message(
+         id=uuid.uuid4(),
+         value="Max recursion depth exceeded",
+         start_time=start_time,
+         end_time=datetime.datetime.now(),
+     )

  if not next:
      # if there are no next tasks, we can terminate the graph
@@ -259,3 +264,30 @@ class Engine:
      task,
      queue,
  )
+
+ def propagate_reset(
+     self, current_task_name: str, start_task_name: str, tasks: dict[str, Task]
+ ) -> None:
+     task = tasks[current_task_name]
+
+     for next_task_name in task.next:
+         if next_task_name == start_task_name:
+             return
+
+         next_task = tasks[next_task_name]
+
+         # in majority of cases there will be only one handle name
+         # however we need to handle the case when single output is mapped
+         # to multiple inputs on the next node
+         handle_names = []
+         for k, v in next_task.handles_mapping:
+             if v == task.name:
+                 handle_names.append(k)
+
+         for handle_name in handle_names:
+             next_state = next_task.input_states[handle_name]
+
+             if next_state.get_state().is_success():
+                 next_state.set_state(State.empty())
+                 next_state.semaphore.release()
+                 self.propagate_reset(next_task_name, start_task_name, tasks)
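
`propagate_reset` is what lets the engine re-run cyclic sections: starting from the task that re-entered the loop, it clears downstream input states that were already marked successful, and stops once the walk loops back to the starting task. A toy, self-contained illustration of that walk (the engine's real `Task`/`State` classes are not part of this diff):

```python
# Toy graph: router -> llm -> output -> router (a cycle).
graph_next = {"router": ["llm"], "llm": ["output"], "output": ["router"]}
input_ready = {"router": True, "llm": True, "output": True}

def propagate_reset(current: str, start: str) -> None:
    for nxt in graph_next[current]:
        if nxt == start:
            return  # completed the loop; stop resetting
        if input_ready[nxt]:
            input_ready[nxt] = False  # force the task to recompute on the next pass
            propagate_reset(nxt, start)

propagate_reset("router", "router")
print(input_ready)  # {'router': True, 'llm': False, 'output': False}
```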