service_forge-0.1.18-py3-none-any.whl
This diff shows the content of publicly released package versions, and the changes between them, as they appear in the supported public registries; it is provided for informational purposes only.
Potentially problematic release.
This version of service-forge might be problematic.
- service_forge/api/deprecated_websocket_api.py +86 -0
- service_forge/api/deprecated_websocket_manager.py +425 -0
- service_forge/api/http_api.py +152 -0
- service_forge/api/http_api_doc.py +455 -0
- service_forge/api/kafka_api.py +126 -0
- service_forge/api/routers/feedback/feedback_router.py +148 -0
- service_forge/api/routers/service/service_router.py +127 -0
- service_forge/api/routers/websocket/websocket_manager.py +83 -0
- service_forge/api/routers/websocket/websocket_router.py +78 -0
- service_forge/api/task_manager.py +141 -0
- service_forge/current_service.py +14 -0
- service_forge/db/__init__.py +1 -0
- service_forge/db/database.py +237 -0
- service_forge/db/migrations/feedback_migration.py +154 -0
- service_forge/db/models/__init__.py +0 -0
- service_forge/db/models/feedback.py +33 -0
- service_forge/llm/__init__.py +67 -0
- service_forge/llm/llm.py +56 -0
- service_forge/model/__init__.py +0 -0
- service_forge/model/feedback.py +30 -0
- service_forge/model/websocket.py +13 -0
- service_forge/proto/foo_input.py +5 -0
- service_forge/service.py +280 -0
- service_forge/service_config.py +44 -0
- service_forge/sft/cli.py +91 -0
- service_forge/sft/cmd/config_command.py +67 -0
- service_forge/sft/cmd/deploy_service.py +123 -0
- service_forge/sft/cmd/list_tars.py +41 -0
- service_forge/sft/cmd/service_command.py +149 -0
- service_forge/sft/cmd/upload_service.py +36 -0
- service_forge/sft/config/injector.py +129 -0
- service_forge/sft/config/injector_default_files.py +131 -0
- service_forge/sft/config/sf_metadata.py +30 -0
- service_forge/sft/config/sft_config.py +200 -0
- service_forge/sft/file/__init__.py +0 -0
- service_forge/sft/file/ignore_pattern.py +80 -0
- service_forge/sft/file/sft_file_manager.py +107 -0
- service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
- service_forge/sft/util/assert_util.py +25 -0
- service_forge/sft/util/logger.py +16 -0
- service_forge/sft/util/name_util.py +8 -0
- service_forge/sft/util/yaml_utils.py +57 -0
- service_forge/storage/__init__.py +5 -0
- service_forge/storage/feedback_storage.py +245 -0
- service_forge/utils/__init__.py +0 -0
- service_forge/utils/default_type_converter.py +12 -0
- service_forge/utils/register.py +39 -0
- service_forge/utils/type_converter.py +99 -0
- service_forge/utils/workflow_clone.py +124 -0
- service_forge/workflow/__init__.py +1 -0
- service_forge/workflow/context.py +14 -0
- service_forge/workflow/edge.py +24 -0
- service_forge/workflow/node.py +184 -0
- service_forge/workflow/nodes/__init__.py +8 -0
- service_forge/workflow/nodes/control/if_node.py +29 -0
- service_forge/workflow/nodes/control/switch_node.py +28 -0
- service_forge/workflow/nodes/input/console_input_node.py +26 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
- service_forge/workflow/nodes/nested/workflow_node.py +28 -0
- service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
- service_forge/workflow/nodes/output/print_node.py +29 -0
- service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
- service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
- service_forge/workflow/port.py +89 -0
- service_forge/workflow/trigger.py +28 -0
- service_forge/workflow/triggers/__init__.py +6 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +257 -0
- service_forge/workflow/triggers/fast_api_trigger.py +201 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +47 -0
- service_forge/workflow/triggers/once_trigger.py +23 -0
- service_forge/workflow/triggers/period_trigger.py +29 -0
- service_forge/workflow/triggers/websocket_api_trigger.py +189 -0
- service_forge/workflow/workflow.py +227 -0
- service_forge/workflow/workflow_callback.py +141 -0
- service_forge/workflow/workflow_config.py +66 -0
- service_forge/workflow/workflow_event.py +15 -0
- service_forge/workflow/workflow_factory.py +246 -0
- service_forge/workflow/workflow_group.py +51 -0
- service_forge/workflow/workflow_type.py +52 -0
- service_forge-0.1.18.dist-info/METADATA +98 -0
- service_forge-0.1.18.dist-info/RECORD +83 -0
- service_forge-0.1.18.dist-info/WHEEL +4 -0
- service_forge-0.1.18.dist-info/entry_points.txt +2 -0
service_forge/workflow/node.py
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+from typing import Any, AsyncIterator, Union, TYPE_CHECKING, Callable, Awaitable
+from abc import ABC, abstractmethod
+import uuid
+from loguru import logger
+from .edge import Edge
+from .port import Port
+from .context import Context
+from ..utils.register import Register
+from ..db.database import DatabaseManager, PostgresDatabase, MongoDatabase, RedisDatabase
+from ..utils.workflow_clone import node_clone
+
+if TYPE_CHECKING:
+    from .workflow import Workflow
+
+class Node(ABC):
+    DEFAULT_INPUT_PORTS: list[Port] = []
+    DEFAULT_OUTPUT_PORTS: list[Port] = []
+
+    CLASS_NOT_REQUIRED_TO_REGISTER = ['Node']
+    AUTO_FILL_INPUT_PORTS = []
+
+    def __init__(
+        self,
+        name: str,
+        context: Context = None,
+        input_edges: list[Edge] = None,
+        output_edges: list[Edge] = None,
+        input_ports: list[Port] = DEFAULT_INPUT_PORTS,
+        output_ports: list[Port] = DEFAULT_OUTPUT_PORTS,
+        query_user: Callable[[str, str], Awaitable[str]] = None,
+    ) -> None:
+        from .workflow_group import WorkflowGroup
+        self.name = name
+        self.input_edges = [] if input_edges is None else input_edges
+        self.output_edges = [] if output_edges is None else output_edges
+        self.input_ports = input_ports
+        self.output_ports = output_ports
+        self.workflow: Workflow = None
+        self.query_user = query_user
+        self.sub_workflows: WorkflowGroup = None
+
+        # runtime variables
+        self.context = context
+        self.input_variables: dict[Port, Any] = {}
+        self.num_activated_input_edges = 0
+
+    @property
+    def default_postgres_database(self) -> PostgresDatabase | None:
+        return self.database_manager.get_default_postgres_database()
+
+    @property
+    def default_mongo_database(self) -> MongoDatabase | None:
+        return self.database_manager.get_default_mongo_database()
+
+    @property
+    def default_redis_database(self) -> RedisDatabase | None:
+        return self.database_manager.get_default_redis_database()
+
+    @property
+    def database_manager(self) -> DatabaseManager:
+        return self.workflow.database_manager
+
+    def backup(self) -> None:
+        # do NOT use deepcopy here
+        # self.bak_context = deepcopy(self.context)
+        # TODO: what if the value changes after backup?
+        self.bak_input_variables = {port: value for port, value in self.input_variables.items()}
+        self.bak_num_activated_input_edges = self.num_activated_input_edges
+
+    def reset(self) -> None:
+        # self.context = deepcopy(self.bak_context)
+        self.input_variables = {port: value for port, value in self.bak_input_variables.items()}
+        self.num_activated_input_edges = self.bak_num_activated_input_edges
+
+    def __init_subclass__(cls) -> None:
+        if cls.__name__ not in Node.CLASS_NOT_REQUIRED_TO_REGISTER:
+            node_register.register(cls.__name__, cls)
+        return super().__init_subclass__()
+
+    def _query_user(self, prompt: str) -> Callable[[str, str], Awaitable[str]]:
+        return self.query_user(self.name, prompt)
+
+    def variables_to_params(self) -> dict[str, Any]:
+        params = {port.name: self.input_variables[port] for port in self.input_variables.keys() if not port.is_extended_generated}
+        for port in self.input_variables.keys():
+            if port.is_extended_generated:
+                if port.get_extended_name() not in params:
+                    params[port.get_extended_name()] = []
+                params[port.get_extended_name()].append((port.get_extended_index(), self.input_variables[port]))
+                params[port.get_extended_name()].sort()
+        return params
+
+    def is_trigger(self) -> bool:
+        from .trigger import Trigger
+        return isinstance(self, Trigger)
+
+    # TODO: maybe add a function before the run function?
+
+    @abstractmethod
+    async def _run(self, **kwargs) -> Union[None, AsyncIterator]:
+        ...
+
+    def run(self) -> Union[None, AsyncIterator]:
+        for key in list(self.input_variables.keys()):
+            if key and key.name[0].isupper():
+                del self.input_variables[key]
+        params = self.variables_to_params()
+        return self._run(**params)
+
+    def get_input_port_by_name(self, name: str) -> Port:
+        # TODO: add warning if port is extended
+        for port in self.input_ports:
+            if port.name == name:
+                return port
+        return None
+
+    def get_output_port_by_name(self, name: str) -> Port:
+        # TODO: add warning if port is extended
+        for port in self.output_ports:
+            if port.name == name:
+                return port
+        return None
+
+    def try_create_extended_input_port(self, name: str) -> None:
+        for port in self.input_ports:
+            if port.is_extended and name.startswith(port.name + '_') and name[len(port.name + '_'):].isdigit():
+                self.input_ports.append(Port(name=name, type=port.type, node=port.node, port=port.port, value=port.value, default=port.default, is_extended=False, is_extended_generated=True))
+
+    def try_create_extended_output_port(self, name: str) -> None:
+        for port in self.output_ports:
+            if port.is_extended and name.startswith(port.name + '_') and name[len(port.name + '_'):].isdigit():
+                self.output_ports.append(Port(name=name, type=port.type, node=port.node, port=port.port, value=port.value, default=port.default, is_extended=False, is_extended_generated=True))
+
+    def num_input_ports(self) -> int:
+        return sum(1 for port in self.input_ports if not port.is_extended)
+
+    def is_ready(self) -> bool:
+        return self.num_activated_input_edges == self.num_input_ports()
+
+    def fill_input_by_name(self, port_name: str, value: Any) -> None:
+        self.try_create_extended_input_port(port_name)
+        port = self.get_input_port_by_name(port_name)
+        if port is None:
+            raise ValueError(f'{port_name} is not a valid input port.')
+        self.fill_input(port, value)
+
+    def fill_input(self, port: Port, value: Any) -> None:
+        port.activate(value)
+
+    def activate_output_edges(self, port: str | Port, data: Any) -> None:
+        if isinstance(port, str):
+            port = self.get_output_port_by_name(port)
+        for output_edge in self.output_edges:
+            if output_edge.start_port == port:
+                output_edge.end_port.activate(data)
+
+    # for trigger nodes
+    def prepare_output_edges(self, port: Port, data: Any) -> None:
+        if isinstance(port, str):
+            port = self.get_output_port_by_name(port)
+        for output_edge in self.output_edges:
+            if output_edge.start_port == port:
+                output_edge.end_port.prepare(data)
+
+    def trigger_output_edges(self, port: Port) -> None:
+        if isinstance(port, str):
+            port = self.get_output_port_by_name(port)
+        for output_edge in self.output_edges:
+            if output_edge.start_port == port:
+                output_edge.end_port.trigger()
+
+    # TODO: the result is outputed to the trigger now, maybe we should add a new function to output the result to the workflow
+    def output_to_workflow(self, data: Any) -> None:
+        self.workflow._handle_workflow_output(self.name, data)
+
+    def extended_output_name(self, name: str, index: int) -> str:
+        return name + '_' + str(index)
+
+    def _clone(self, context: Context) -> Node:
+        return node_clone(self, context)
+
+node_register = Register[Node]()
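The Node base class above is the extension point for the rest of this release: defining a subclass is enough to register it, because __init_subclass__ calls node_register.register(cls.__name__, cls), and lowercase input port names are turned into keyword arguments for _run() by variables_to_params(). A minimal illustrative sketch (not part of the package; the class and port names are made up, and it assumes service-forge 0.1.18 and its dependencies are installed):

from service_forge.workflow.node import Node
from service_forge.workflow.port import Port

class UppercaseNode(Node):
    # hypothetical example node, written in the same style as the nodes later in this diff
    DEFAULT_INPUT_PORTS = [
        Port("TRIGGER", bool),
        Port("text", str),
    ]
    DEFAULT_OUTPUT_PORTS = [
        Port("result", str),
    ]

    def __init__(self, name: str) -> None:
        super().__init__(name)

    async def _run(self, text: str) -> None:
        # the "text" input port arrives as a keyword argument; "TRIGGER" is dropped by run()
        self.activate_output_edges('result', text.upper())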
service_forge/workflow/nodes/__init__.py
@@ -0,0 +1,8 @@
+from .output.print_node import PrintNode
+from .output.kafka_output_node import KafkaOutputNode
+from .control.if_node import IfNode
+from .control.switch_node import SwitchNode
+from .llm.query_llm_node import QueryLLMNode
+from .test.if_console_input_node import IfConsoleInputNode
+from .nested.workflow_node import WorkflowNode
+from .test.time_consuming_node import TimeConsumingNode
service_forge/workflow/nodes/control/if_node.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+from ...node import Node
+from ...port import Port
+
+class IfNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("TRIGGER", bool),
+        Port("condition", str),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("true", bool),
+        Port("false", bool),
+    ]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, condition: str) -> None:
+        result = eval(condition)
+        if result:
+            self.activate_output_edges('true', True)
+        else:
+            self.activate_output_edges('false', False)
service_forge/workflow/nodes/control/switch_node.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+from typing import Any
+from ...node import Node
+from ...port import Port
+
+class SwitchNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("TRIGGER", bool),
+        Port("condition", str, is_extended=True),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("result", Any, is_extended=True),
+    ]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, condition: list[tuple[int, str]]) -> None:
+        for index, cond in condition:
+            if eval(cond):
+                self.activate_output_edges(self.extended_output_name('result', index), str(index))
+                break
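The condition: list[tuple[int, str]] parameter in SwitchNode._run above is how extended ports surface: ports named condition_0, condition_1, ... are collapsed by Node.variables_to_params() into one index-sorted list. A small sketch of that mapping (not from the package; the condition strings are invented, and it assumes the package and its dependencies are installed):

from service_forge.workflow.port import Port
from service_forge.workflow.nodes import SwitchNode

sw = SwitchNode("switch")
# hand-build two extended-generated inputs (normally filled via fill_input_by_name)
p0 = Port("condition_0", str, node=sw, is_extended_generated=True)
p1 = Port("condition_1", str, node=sw, is_extended_generated=True)
sw.input_variables = {p0: "1 > 2", p1: "2 > 1"}

print(sw.variables_to_params())
# {'condition': [(0, '1 > 2'), (1, '2 > 1')]}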
service_forge/workflow/nodes/input/console_input_node.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+from service_forge.llm import chat_stream, Model
+
+class ConsoleInputNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("TRIGGER", bool),
+        Port("prompt", str),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("user_input", str),
+    ]
+
+    AUTO_FILL_INPUT_PORTS = []
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(name)
+
+    async def _run(self, prompt: str) -> None:
+        user_input = self._query_user(prompt)
+        self.activate_output_edges(self.get_output_port_by_name('user_input'), user_input)
service_forge/workflow/nodes/llm/query_llm_node.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+import os
+
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+from service_forge.llm import chat_stream, Model
+
+class QueryLLMNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("prompt", str),
+        Port("system_prompt", str),
+        Port("temperature", float),
+        Port("TRIGGER", bool),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("response", str),
+    ]
+
+    AUTO_FILL_INPUT_PORTS = [('TRIGGER', True)]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, prompt: str, system_prompt: str, temperature: float) -> None:
+        if os.path.exists(system_prompt):
+            with open(system_prompt, "r") as f:
+                system_prompt = f.read()
+        if os.path.exists(prompt):
+            with open(prompt, "r") as f:
+                prompt = f.read()
+
+        print(f"prompt: {prompt} temperature: {temperature}")
+        response = chat_stream(prompt, system_prompt, Model.GEMINI, temperature)
+        for chunk in response:
+            yield chunk
service_forge/workflow/nodes/nested/workflow_node.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+
+class WorkflowNode(Node):
+    from service_forge.workflow.workflow import Workflow
+    DEFAULT_INPUT_PORTS = [
+        Port("workflow", Workflow),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+    ]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, workflow: Workflow, **kwargs) -> None:
+        for input_port in self.input_ports:
+            if input_port.is_sub_workflow_input_port():
+                input_port.port.node.fill_input(input_port.port, input_port.value)
+        await workflow.run()
service_forge/workflow/nodes/output/kafka_output_node.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+from service_forge.api.kafka_api import KafkaApp
+from typing import Any
+
+class KafkaOutputNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("app", KafkaApp),
+        Port("topic", str),
+        Port("data_type", type),
+        Port("data", Any),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+    ]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, app: KafkaApp, topic: str, data_type: type, data: Any) -> None:
+        await app.send_message(topic, data_type, data)
service_forge/workflow/nodes/output/print_node.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+import asyncio
+from typing import AsyncIterator
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+
+class PrintNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("TRIGGER", bool),
+        Port("message", str),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+    ]
+
+    AUTO_FILL_INPUT_PORTS = [('TRIGGER', True)]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, message: str) -> AsyncIterator[str]:
+        for char in str(message):
+            await asyncio.sleep(0.1)
+            yield char
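PrintNode._run above is an async generator, so a node's output can be consumed as a stream. A minimal sketch of driving it directly, bypassing the workflow machinery (assumes the package and its dependencies are installed):

import asyncio

from service_forge.workflow.nodes import PrintNode

async def main() -> None:
    node = PrintNode("printer")
    # one character is yielded every 0.1 s
    async for char in node._run("hello"):
        print(char, end="", flush=True)
    print()

asyncio.run(main())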
service_forge/workflow/nodes/test/if_console_input_node.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+from typing import AsyncIterator
+from service_forge.workflow.node import Node
+from service_forge.workflow.port import Port
+
+class IfConsoleInputNode(Node):
+    DEFAULT_INPUT_PORTS = [
+        Port("TRIGGER", bool),
+        Port("condition", str),
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("true", bool),
+        Port("false", bool),
+    ]
+
+    def __init__(
+        self,
+        name: str,
+    ) -> None:
+        super().__init__(
+            name,
+        )
+
+    async def _run(self, condition: str) -> None:
+        while True:
+            user_input = await self._query_user(condition)
+            if user_input.lower() in ['y', 'yes']:
+                self.activate_output_edges(self.get_output_port_by_name('true'), True)
+                break
+            elif user_input.lower() in ['n', 'no']:
+                self.activate_output_edges(self.get_output_port_by_name('false'), False)
+                break
service_forge/workflow/nodes/test/time_consuming_node.py
@@ -0,0 +1,62 @@
+
+from __future__ import annotations
+import asyncio
+import uuid
+import json
+from typing import Any
+from ...node import Node
+from ...port import Port
+from ....api.routers.websocket.websocket_manager import websocket_manager
+
+# It's deprecated, just for testing
+class TimeConsumingNode(Node):
+    """Simulated time-consuming node that periodically sends progress updates."""
+
+    DEFAULT_INPUT_PORTS = [
+        Port("input", Any)
+    ]
+
+    DEFAULT_OUTPUT_PORTS = [
+        Port("output", Any)
+    ]
+
+    def __init__(self, name: str, duration: float = 2.0):
+        super().__init__(name)
+        self.duration = duration  # total duration in seconds
+        self.progress = 0.0
+        self.task_id = None
+
+    async def _run(self, input: Any = None, task_id: uuid.UUID = None) -> str:
+        """Run the time-consuming task, updating progress periodically."""
+        # save the task ID (if any)
+        if task_id is not None:
+            self.task_id = task_id
+
+        total_steps = 10
+        result = f"Completed {self.name} after {self.duration} seconds"
+
+        # execute step by step, updating progress at each step
+        for i in range(total_steps + 1):
+            # update progress
+            self.progress = i / total_steps
+
+            # send a progress update
+            if self.task_id:
+                await websocket_manager.send_progress(
+                    self.task_id,
+                    self.name,
+                    self.progress
+                )
+
+            # simulate the time cost
+            if i < total_steps:  # no wait is needed after the last step
+                await asyncio.sleep(self.duration / total_steps)
+
+        # get the output port and set its value
+        output_port = self.get_output_port_by_name('output')
+        output_port.prepare(result)
+
+        # activate the output port
+        self.activate_output_edges(output_port, result)
+
+        return result
service_forge/workflow/port.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, Any
+from ..utils.workflow_clone import port_clone
+
+if TYPE_CHECKING:
+    from .node import Node
+
+class Port:
+    def __init__(
+        self,
+        name: str,
+        type: type,
+        node: Node = None,
+        port: Port = None,
+        value: Any = None,
+        default: Any = None,
+        is_extended: bool = False,
+        is_extended_generated: bool = False,
+    ) -> None:
+        self.name = name
+        self.type = type
+        self.node = node
+        self.port = port
+        self.value = value
+        # not used yet
+        self.default = default
+        self.is_prepared = False
+        self.is_extended = is_extended
+        self.is_extended_generated = is_extended_generated
+
+    def is_sub_workflow_input_port(self) -> bool:
+        return self.port != None
+
+    def prepare(self, data: Any) -> None:
+        from ..utils.default_type_converter import type_converter
+        data = type_converter.convert(data, self.type, node=self.node)
+        self.value = data
+        self.is_prepared = True
+
+    def trigger(self) -> None:
+        if self.node is None:
+            return
+        if self in self.node.input_variables:
+            return
+        self.node.input_variables[self] = self.value
+        self.node.num_activated_input_edges += 1
+        if self.node.is_ready():
+            self.node.workflow.ready_nodes.append(self.node)
+
+    def activate(self, data: Any) -> None:
+        self.prepare(data)
+        self.trigger()
+
+    def get_extended_name(self) -> str:
+        if self.is_extended_generated:
+            return '_'.join(self.name.split('_')[:-1])
+        raise ValueError(f"Port {self.name} is not extended generated.")
+
+    def get_extended_index(self) -> int:
+        if self.is_extended_generated:
+            return int(self.name.split('_')[-1])
+        raise ValueError(f"Port {self.name} is not extended generated.")
+
+    def _clone(self, node_map: dict[Node, Node]) -> Port:
+        return port_clone(self, node_map)
+
+# node port
+def create_port(name: str, type: type, node: Node = None, value: Any = None, port: Port = None) -> Port:
+    return Port(name, type, node, port, value)
+
+# workflow input port
+def create_workflow_input_port(name: str, port: Port, value: Any = None) -> Port:
+    if value is None:
+        value = port.value
+    return Port(name, port.type, port.node, port, value)
+
+# sub workflow input port
+# node is the node that the sub workflow is running on
+def create_sub_workflow_input_port(name: str, node: Node, port: Port, value: Any = None) -> Port:
+    if value is None:
+        value = port.value
+    return Port(name, port.type, node, port, value)
+
+PORT_DELIMITER = '|'
+
+def parse_port_name(port_name: str) -> tuple[str, str]:
+    if PORT_DELIMITER not in port_name or len(port_name.split(PORT_DELIMITER)) != 2:
+        raise ValueError(f"Invalid port name: {port_name}")
+    return port_name.split(PORT_DELIMITER)
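A short illustration (not from the package) of the naming helpers defined above: edge endpoint strings join a node name and a port name with PORT_DELIMITER, and extended-generated ports carry a numeric suffix that get_extended_name()/get_extended_index() split off again. The literal strings here are made up:

from service_forge.workflow.port import Port, parse_port_name

node_name, port_name = parse_port_name("QueryLLM|response")
print(node_name, port_name)    # QueryLLM response

p = Port("condition_2", str, is_extended_generated=True)
print(p.get_extended_name())   # condition
print(p.get_extended_index())  # 2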
service_forge/workflow/trigger.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+import asyncio
+from typing import AsyncIterator
+from abc import ABC, abstractmethod
+import uuid
+from .node import Node
+from .workflow_event import WorkflowResult
+
+class Trigger(Node, ABC):
+    def __init__(self, name: str):
+        super().__init__(name)
+        self.trigger_queue = asyncio.Queue()
+        # for workflow result
+        self.result_queues: dict[uuid.UUID, asyncio.Queue[WorkflowResult]] = {}
+        # for node stream output
+        self.stream_queues: dict[uuid.UUID, asyncio.Queue[WorkflowResult]] = {}
+
+    @abstractmethod
+    async def _run(self) -> AsyncIterator[bool]:
+        ...
+
+    @abstractmethod
+    async def _stop(self) -> AsyncIterator[bool]:
+        ...
+
+    def trigger(self, task_id: uuid.UUID) -> bool:
+        self.prepare_output_edges(self.get_output_port_by_name('trigger'), True)
+        return task_id
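The concrete triggers (OnceTrigger, PeriodTrigger, and the API triggers re-exported below) are not included in this excerpt, so the following is only a rough, hypothetical sketch of what a Trigger subclass has to provide. The _run/_stop signatures come from the abstract class above; the interval-based scheduling is invented, and how the workflow loop actually consumes these iterators is not shown in this diff:

import asyncio
from typing import AsyncIterator

from service_forge.workflow.trigger import Trigger

class IntervalTrigger(Trigger):
    # hypothetical example; the shipped implementations are OnceTrigger, PeriodTrigger, etc.
    def __init__(self, name: str, interval: float = 1.0) -> None:
        super().__init__(name)
        self.interval = interval
        self._running = True

    async def _run(self) -> AsyncIterator[bool]:
        while self._running:
            await asyncio.sleep(self.interval)
            yield True

    async def _stop(self) -> AsyncIterator[bool]:
        self._running = False
        yield True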
service_forge/workflow/triggers/__init__.py
@@ -0,0 +1,6 @@
+from .once_trigger import OnceTrigger
+from .period_trigger import PeriodTrigger
+from .fast_api_trigger import FastAPITrigger
+from .kafka_api_trigger import KafkaAPITrigger
+from .websocket_api_trigger import WebSocketAPITrigger
+from .a2a_api_trigger import A2AAPITrigger