service-forge 0.1.0 (service_forge-0.1.0-py3-none-any.whl)
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- service_forge/api/http_api.py +138 -0
- service_forge/api/kafka_api.py +126 -0
- service_forge/api/task_manager.py +141 -0
- service_forge/api/websocket_api.py +86 -0
- service_forge/api/websocket_manager.py +425 -0
- service_forge/db/__init__.py +1 -0
- service_forge/db/database.py +119 -0
- service_forge/llm/__init__.py +62 -0
- service_forge/llm/llm.py +56 -0
- service_forge/main.py +121 -0
- service_forge/model/__init__.py +0 -0
- service_forge/model/websocket.py +13 -0
- service_forge/proto/foo_input.py +5 -0
- service_forge/service.py +111 -0
- service_forge/service_config.py +115 -0
- service_forge/sft/cli.py +91 -0
- service_forge/sft/cmd/config_command.py +67 -0
- service_forge/sft/cmd/deploy_service.py +124 -0
- service_forge/sft/cmd/list_tars.py +41 -0
- service_forge/sft/cmd/service_command.py +149 -0
- service_forge/sft/cmd/upload_service.py +36 -0
- service_forge/sft/config/injector.py +87 -0
- service_forge/sft/config/injector_default_files.py +97 -0
- service_forge/sft/config/sf_metadata.py +30 -0
- service_forge/sft/config/sft_config.py +125 -0
- service_forge/sft/file/__init__.py +0 -0
- service_forge/sft/file/ignore_pattern.py +80 -0
- service_forge/sft/file/sft_file_manager.py +107 -0
- service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
- service_forge/sft/util/assert_util.py +25 -0
- service_forge/sft/util/logger.py +16 -0
- service_forge/sft/util/name_util.py +2 -0
- service_forge/utils/__init__.py +0 -0
- service_forge/utils/default_type_converter.py +12 -0
- service_forge/utils/register.py +39 -0
- service_forge/utils/type_converter.py +74 -0
- service_forge/workflow/__init__.py +1 -0
- service_forge/workflow/context.py +13 -0
- service_forge/workflow/edge.py +31 -0
- service_forge/workflow/node.py +179 -0
- service_forge/workflow/nodes/__init__.py +7 -0
- service_forge/workflow/nodes/control/if_node.py +29 -0
- service_forge/workflow/nodes/input/console_input_node.py +26 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
- service_forge/workflow/nodes/nested/workflow_node.py +28 -0
- service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
- service_forge/workflow/nodes/output/print_node.py +29 -0
- service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
- service_forge/workflow/nodes/test/time_consuming_node.py +61 -0
- service_forge/workflow/port.py +86 -0
- service_forge/workflow/trigger.py +20 -0
- service_forge/workflow/triggers/__init__.py +4 -0
- service_forge/workflow/triggers/fast_api_trigger.py +125 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +44 -0
- service_forge/workflow/triggers/once_trigger.py +20 -0
- service_forge/workflow/triggers/period_trigger.py +26 -0
- service_forge/workflow/workflow.py +251 -0
- service_forge/workflow/workflow_factory.py +227 -0
- service_forge/workflow/workflow_group.py +23 -0
- service_forge/workflow/workflow_type.py +52 -0
- service_forge-0.1.0.dist-info/METADATA +93 -0
- service_forge-0.1.0.dist-info/RECORD +64 -0
- service_forge-0.1.0.dist-info/WHEEL +4 -0
- service_forge-0.1.0.dist-info/entry_points.txt +2 -0
service_forge/workflow/node.py
@@ -0,0 +1,179 @@
from __future__ import annotations

from typing import Any, AsyncIterator, Union, TYPE_CHECKING, Callable, Awaitable
from abc import ABC, abstractmethod
import uuid
from loguru import logger
from copy import deepcopy
from .edge import Edge
from .port import Port
from .context import Context
from ..utils.register import Register
from ..db.database import Database, DatabaseManager

if TYPE_CHECKING:
    from .workflow import Workflow

class Node(ABC):
    DEFAULT_INPUT_PORTS: list[Port] = []
    DEFAULT_OUTPUT_PORTS: list[Port] = []

    CLASS_NOT_REQUIRED_TO_REGISTER = ['Node']
    AUTO_FILL_INPUT_PORTS = []

    def __init__(
        self,
        name: str,
        context: Context = None,
        input_edges: list[Edge] = None,
        output_edges: list[Edge] = None,
        input_ports: list[Port] = DEFAULT_INPUT_PORTS,
        output_ports: list[Port] = DEFAULT_OUTPUT_PORTS,
        query_user: Callable[[str, str], Awaitable[str]] = None,
    ) -> None:
        from .workflow_group import WorkflowGroup
        self.name = name
        self.input_edges = [] if input_edges is None else input_edges
        self.output_edges = [] if output_edges is None else output_edges
        self.input_ports = input_ports
        self.output_ports = output_ports
        self.workflow: Workflow = None
        self.query_user = query_user
        self.sub_workflows: WorkflowGroup = None

        # runtime variables
        self.context = context
        self.input_variables: dict[Port, Any] = {}
        self.num_activated_input_edges = 0

    @property
    def default_database(self) -> Database | None:
        return self.database_manager.get_default_database()

    @property
    def database_manager(self) -> DatabaseManager:
        return self.workflow.database_manager

    def backup(self) -> None:
        # do NOT use deepcopy here
        # self.bak_context = deepcopy(self.context)
        # TODO: what if the value changes after backup?
        self.bak_input_variables = {port: value for port, value in self.input_variables.items()}
        self.bak_num_activated_input_edges = self.num_activated_input_edges

    def reset(self) -> None:
        # self.context = deepcopy(self.bak_context)
        self.input_variables = {port: value for port, value in self.bak_input_variables.items()}
        self.num_activated_input_edges = self.bak_num_activated_input_edges

    def __init_subclass__(cls) -> None:
        if cls.__name__ not in Node.CLASS_NOT_REQUIRED_TO_REGISTER:
            node_register.register(cls.__name__, cls)
        return super().__init_subclass__()

    def _query_user(self, prompt: str) -> Callable[[str, str], Awaitable[str]]:
        return self.query_user(self.name, prompt)

    def variables_to_params(self) -> dict[str, Any]:
        return {port.name: self.input_variables[port] for port in self.input_variables.keys()}

    def is_trigger(self) -> bool:
        from .trigger import Trigger
        return isinstance(self, Trigger)

    # TODO: maybe add a function before the run function?

    @abstractmethod
    def _run(self, **kwargs) -> Union[None, AsyncIterator]:
        ...

    def run(self, task_id: uuid.UUID = None) -> Union[None, AsyncIterator]:
        for key in list(self.input_variables.keys()):
            if key and key.name[0].isupper():
                del self.input_variables[key]
        params = self.variables_to_params()
        if task_id is not None and 'task_id' in self._run.__code__.co_varnames:
            params['task_id'] = task_id
        result = self._run(**params)
        return result

    def get_input_port_by_name(self, name: str) -> Port:
        for port in self.input_ports:
            if port.name == name:
                return port
        return None

    def get_output_port_by_name(self, name: str) -> Port:
        for port in self.output_ports:
            if port.name == name:
                return port
        return None

    def num_input_ports(self) -> int:
        return len(self.input_ports)

    def is_ready(self) -> bool:
        return self.num_activated_input_edges == self.num_input_ports()

    def get_input_port_by_name(self, name: str) -> Port:
        for port in self.input_ports:
            if port.name == name:
                return port
        return None

    def get_output_port_by_name(self, name: str) -> Port:
        for port in self.output_ports:
            if port.name == name:
                return port
        return None

    def fill_input_by_name(self, port_name: str, value: Any) -> None:
        port = self.get_input_port_by_name(port_name)
        if port is None:
            raise ValueError(f'{port_name} is not a valid input port.')
        self.fill_input(port, value)

    def fill_input(self, port: Port, value: Any) -> None:
        port.activate(value)

    def activate_output_edges(self, port: Port, data: Any) -> None:
        for output_edge in self.output_edges:
            if output_edge.start_port == port:
                output_edge.end_port.activate(data)

    # for trigger nodes
    def prepare_output_edges(self, port: Port, data: Any) -> None:
        for output_edge in self.output_edges:
            if output_edge.start_port == port:
                output_edge.end_port.prepare(data)

    def trigger_output_edges(self, port: Port) -> None:
        for output_edge in self.output_edges:
            if output_edge.start_port == port:
                output_edge.end_port.trigger()

    # TODO: the result is outputed to the trigger now, maybe we should add a new function to output the result to the workflow
    def output_to_workflow(self, data: Any) -> None:
        self.workflow._handle_workflow_output(self.name, data)

    def _simple_clone(self) -> Node:
        node = self.__class__(
            name=self.name
        )
        node.context = self.context._clone()
        node.input_edges = []
        node.output_edges = []
        node.input_ports = []
        node.output_ports = []
        node.query_user = self.query_user
        node.workflow = None

        if self.sub_workflows is not None:
            raise ValueError("Sub workflows are not supported in simple clone.")
        node.sub_workflows = None
        node.input_variables = {}
        node.num_activated_input_edges = self.num_activated_input_edges

        return node

node_register = Register[Node]()
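For orientation, here is a minimal sketch of how this base class is extended elsewhere in the package: defining a subclass is enough to register it in node_register via __init_subclass__, and Node.run() drops inputs whose port name starts with an uppercase letter (such as TRIGGER) before passing the rest to _run() as keyword arguments keyed by port name. The GreetNode class below is illustrative only and not part of the wheel; how ports get attached to instances is handled by the workflow factory, which is not reproduced in this section.

# Illustrative sketch, not part of service_forge 0.1.0.
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port


class GreetNode(Node):
    # Lowercase port names become keyword arguments of _run();
    # the uppercase TRIGGER port is stripped by Node.run().
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("name", str),
    ]
    DEFAULT_OUTPUT_PORTS = [
        Port("greeting", str),
    ]

    def __init__(self, name: str) -> None:
        super().__init__(name)

    async def _run(self, name: str) -> None:
        # Push the result to every edge attached to the 'greeting' output port.
        self.activate_output_edges(self.get_output_port_by_name('greeting'), f"Hello, {name}!")

# Defining the class above triggers Node.__init_subclass__, which calls
# node_register.register('GreetNode', GreetNode).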
service_forge/workflow/nodes/__init__.py
@@ -0,0 +1,7 @@
from .output.print_node import PrintNode
from .output.kafka_output_node import KafkaOutputNode
from .control.if_node import IfNode
from .llm.query_llm_node import QueryLLMNode
from .test.if_console_input_node import IfConsoleInputNode
from .nested.workflow_node import WorkflowNode
from .test.time_consuming_node import TimeConsumingNode
service_forge/workflow/nodes/control/if_node.py
@@ -0,0 +1,29 @@
from __future__ import annotations
from ...node import Node
from ...port import Port

class IfNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("condition", str),
    ]

    DEFAULT_OUTPUT_PORTS = [
        Port("true", bool),
        Port("false", bool),
    ]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, condition: str) -> None:
        result = eval(condition)
        if result:
            self.activate_output_edges(self.get_output_port_by_name('true'), True)
        else:
            self.activate_output_edges(self.get_output_port_by_name('false'), False)
service_forge/workflow/nodes/input/console_input_node.py
@@ -0,0 +1,26 @@
from __future__ import annotations
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port
from service_forge.llm import chat_stream, Model

class ConsoleInputNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("prompt", str),
    ]

    DEFAULT_OUTPUT_PORTS = [
        Port("user_input", str),
    ]

    AUTO_FILL_INPUT_PORTS = []

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(name)

    async def _run(self, prompt: str) -> None:
        user_input = self._query_user(prompt)
        self.activate_output_edges(self.get_output_port_by_name('user_input'), user_input)
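User interaction goes through the query_user callback that Node.__init__ accepts (Callable[[str, str], Awaitable[str]]); _query_user(prompt) forwards (self.name, prompt) to it and returns the resulting awaitable, which callers are expected to await, as IfConsoleInputNode does further down in this diff. A possible callback, shown only as a sketch and not part of the package:

# Illustrative sketch of a query_user callback; not part of service_forge 0.1.0.
import asyncio

async def console_query_user(node_name: str, prompt: str) -> str:
    # Run the blocking input() call in a worker thread so the event loop stays responsive.
    return await asyncio.to_thread(input, f"[{node_name}] {prompt} ")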
service_forge/workflow/nodes/llm/query_llm_node.py
@@ -0,0 +1,41 @@
from __future__ import annotations
import os

from service_forge.workflow.node import Node
from service_forge.workflow.port import Port
from service_forge.llm import chat_stream, Model

class QueryLLMNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("prompt", str),
        Port("system_prompt", str),
        Port("temperature", float),
        Port("TRIGGER", bool),
    ]

    DEFAULT_OUTPUT_PORTS = [
        Port("response", str),
    ]

    AUTO_FILL_INPUT_PORTS = [('TRIGGER', True)]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, prompt: str, system_prompt: str, temperature: float) -> None:
        if os.path.exists(system_prompt):
            with open(system_prompt, "r") as f:
                system_prompt = f.read()
        if os.path.exists(prompt):
            with open(prompt, "r") as f:
                prompt = f.read()

        print(f"prompt: {prompt} temperature: {temperature}")
        response = chat_stream(prompt, system_prompt, Model.DEEPSEEK_V3_250324, temperature)
        for chunk in response:
            yield chunk
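QueryLLMNode._run treats prompt and system_prompt as either literal text or a path to a file whose contents are read first, then streams chunks from chat_stream. A direct call with the same positional arguments would look roughly like this (a sketch; the argument names and any behavior of chat_stream beyond what this diff shows are assumptions):

# Illustrative sketch; mirrors the call QueryLLMNode makes, not a documented API.
from service_forge.llm import chat_stream, Model

for chunk in chat_stream(
    "Summarize this package in one sentence.",   # prompt text
    "You are a concise assistant.",              # system prompt text
    Model.DEEPSEEK_V3_250324,
    0.2,                                         # temperature
):
    print(chunk, end="", flush=True)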
service_forge/workflow/nodes/nested/workflow_node.py
@@ -0,0 +1,28 @@
from __future__ import annotations

from typing import TYPE_CHECKING
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port

class WorkflowNode(Node):
    from service_forge.workflow.workflow import Workflow
    DEFAULT_INPUT_PORTS = [
        Port("workflow", Workflow),
    ]

    DEFAULT_OUTPUT_PORTS = [
    ]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, workflow: Workflow, **kwargs) -> None:
        for input_port in self.input_ports:
            if input_port.is_sub_workflow_input_port():
                input_port.port.node.fill_input(input_port.port, input_port.value)
        await workflow.run()
service_forge/workflow/nodes/output/kafka_output_node.py
@@ -0,0 +1,27 @@
from __future__ import annotations
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port
from service_forge.api.kafka_api import KafkaApp
from typing import Any

class KafkaOutputNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("app", KafkaApp),
        Port("topic", str),
        Port("data_type", type),
        Port("data", Any),
    ]

    DEFAULT_OUTPUT_PORTS = [
    ]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, app: KafkaApp, topic: str, data_type: type, data: Any) -> None:
        await app.send_message(topic, data_type, data)
service_forge/workflow/nodes/output/print_node.py
@@ -0,0 +1,29 @@
from __future__ import annotations
import asyncio
from typing import AsyncIterator
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port

class PrintNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("message", str),
    ]

    DEFAULT_OUTPUT_PORTS = [
    ]

    AUTO_FILL_INPUT_PORTS = [('TRIGGER', True)]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, message: str) -> AsyncIterator[str]:
        for char in str(message):
            await asyncio.sleep(0.1)
            yield char
service_forge/workflow/nodes/test/if_console_input_node.py
@@ -0,0 +1,33 @@
from __future__ import annotations
from typing import AsyncIterator
from service_forge.workflow.node import Node
from service_forge.workflow.port import Port

class IfConsoleInputNode(Node):
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("condition", str),
    ]

    DEFAULT_OUTPUT_PORTS = [
        Port("true", bool),
        Port("false", bool),
    ]

    def __init__(
        self,
        name: str,
    ) -> None:
        super().__init__(
            name,
        )

    async def _run(self, condition: str) -> None:
        while True:
            user_input = await self._query_user(condition)
            if user_input.lower() in ['y', 'yes']:
                self.activate_output_edges(self.get_output_port_by_name('true'), True)
                break
            elif user_input.lower() in ['n', 'no']:
                self.activate_output_edges(self.get_output_port_by_name('false'), False)
                break
service_forge/workflow/nodes/test/time_consuming_node.py
@@ -0,0 +1,61 @@

from __future__ import annotations
import asyncio
import uuid
import json
from typing import Any
from ...node import Node
from ...port import Port
from ....api.websocket_manager import websocket_manager

class TimeConsumingNode(Node):
    """Simulated time-consuming node that periodically sends progress updates."""

    DEFAULT_INPUT_PORTS = [
        Port("input", Any)
    ]

    DEFAULT_OUTPUT_PORTS = [
        Port("output", Any)
    ]

    def __init__(self, name: str, duration: float = 2.0):
        super().__init__(name)
        self.duration = duration  # total duration in seconds
        self.progress = 0.0
        self.task_id = None

    async def _run(self, input: Any = None, task_id: uuid.UUID = None) -> str:
        """Run the time-consuming task, updating progress periodically."""
        # Save the task ID, if any
        if task_id is not None:
            self.task_id = task_id

        total_steps = 10
        result = f"Completed {self.name} after {self.duration} seconds"

        # Execute in steps, updating progress at each one
        for i in range(total_steps + 1):
            # Update progress
            self.progress = i / total_steps

            # Send a progress update
            if self.task_id:
                await websocket_manager.send_progress(
                    self.task_id,
                    self.name,
                    self.progress
                )

            # Simulate the time cost
            if i < total_steps:  # no wait needed after the last step
                await asyncio.sleep(self.duration / total_steps)

        # Get the output port and set its value
        output_port = self.get_output_port_by_name('output')
        output_port.prepare(result)

        # Activate the output edges
        self.activate_output_edges(output_port, result)

        return result
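The task_id parameter reaches _run only because Node.run() inspects _run.__code__.co_varnames and injects it when the node declares such a parameter; TimeConsumingNode then uses it to address progress updates through websocket_manager.send_progress. The snippet below only demonstrates that introspection check in isolation (names are illustrative, not package code):

# Illustrative sketch of the co_varnames check used by Node.run(); not package code.
import uuid

async def _run(input=None, task_id: uuid.UUID = None):  # same shape as TimeConsumingNode._run
    return task_id

params = {"input": "payload"}
task_id = uuid.uuid4()
if task_id is not None and 'task_id' in _run.__code__.co_varnames:
    params['task_id'] = task_id   # only nodes that declare task_id receive it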
service_forge/workflow/port.py
@@ -0,0 +1,86 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    from .node import Node

class Port:
    def __init__(
        self,
        name: str,
        type: type,
        node: Node = None,
        port: Port = None,
        value: Any = None,
        default: Any = None,
    ) -> None:
        self.name = name
        self.type = type
        self.node = node
        self.port = port
        self.value = value
        # not used yet
        self.default = default
        self.is_prepared = False

    def is_sub_workflow_input_port(self) -> bool:
        return self.port != None

    def prepare(self, data: Any) -> None:
        from ..utils.default_type_converter import type_converter
        data = type_converter.convert(data, self.type, node=self.node)
        self.value = data
        self.is_prepared = True

    def trigger(self) -> None:
        if self.node is None:
            return
        if self in self.node.input_variables:
            return
        self.node.input_variables[self] = self.value
        self.node.num_activated_input_edges += 1
        if self.node.is_ready():
            self.node.workflow.ready_nodes.append(self.node)

    def activate(self, data: Any) -> None:
        self.prepare(data)
        self.trigger()

    def _simple_clone(self, node_map: dict[Node, Node]) -> Port:
        # Check whether the node is None to avoid a KeyError
        node = node_map[self.node] if self.node is not None else None
        port = Port(
            name=self.name,
            type=self.type,
            node=node,
            port=None,
            value=self.value,
            default=self.default,
        )
        port.is_prepared = self.is_prepared
        return port


# node port
def create_port(name: str, type: type, node: Node = None, value: Any = None) -> Port:
    return Port(name, type, node, None, value)

# workflow input port
def create_workflow_input_port(name: str, port: Port, value: Any = None) -> Port:
    if value is None:
        value = port.value
    return Port(name, port.type, port.node, port, value)

# sub workflow input port
# node is the node that the sub workflow is running on
def create_sub_workflow_input_port(name: str, node: Node, port: Port, value: Any = None) -> Port:
    if value is None:
        value = port.value
    return Port(name, port.type, node, port, value)

PORT_DELIMITER = '|'

def parse_port_name(port_name: str) -> tuple[str, str]:
    if PORT_DELIMITER not in port_name or len(port_name.split(PORT_DELIMITER)) != 2:
        raise ValueError(f"Invalid port name: {port_name}")
    return port_name.split(PORT_DELIMITER)
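The module also exposes factory helpers for the different port flavours and a parser for delimited port names. Used standalone they look roughly like this (a sketch; which half of the delimited name denotes the node and which the port is not established by this diff):

# Illustrative sketch; not part of service_forge 0.1.0.
from service_forge.workflow.port import (
    Port,
    create_port,
    create_workflow_input_port,
    parse_port_name,
)

message_port = create_port("message", str)                    # plain node port, no node attached yet
wf_input = create_workflow_input_port("text", message_port)   # wraps an existing port as a workflow input

left, right = parse_port_name("printer|message")              # splits on PORT_DELIMITER ('|')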
service_forge/workflow/trigger.py
@@ -0,0 +1,20 @@
from __future__ import annotations
import asyncio
from typing import AsyncIterator
from abc import ABC, abstractmethod
import uuid
from .node import Node

class Trigger(Node, ABC):
    def __init__(self, name: str):
        super().__init__(name)
        self.trigger_queue = asyncio.Queue()
        self.result_queues = {}

    @abstractmethod
    async def _run(self) -> AsyncIterator[bool]:
        ...

    def trigger(self, task_id: uuid.UUID) -> bool:
        self.prepare_output_edges(self.get_output_port_by_name('trigger'), True)
        return task_id
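Concrete triggers such as once_trigger.py and period_trigger.py are listed in the file summary above but their contents are not reproduced in this section, so the following one-shot trigger is only a hypothetical sketch of the subclassing pattern implied by the abstract _run signature:

# Illustrative sketch; the package's own once_trigger.py is not shown here.
from typing import AsyncIterator

from service_forge.workflow.trigger import Trigger


class ExampleOnceTrigger(Trigger):
    async def _run(self) -> AsyncIterator[bool]:
        # Fire exactly once, then stop iterating.
        yield True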