service-forge 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of service-forge might be problematic. Click here for more details.
- service_forge/api/deprecated_websocket_api.py +86 -0
- service_forge/api/deprecated_websocket_manager.py +425 -0
- service_forge/api/http_api.py +148 -0
- service_forge/api/http_api_doc.py +455 -0
- service_forge/api/kafka_api.py +126 -0
- service_forge/api/routers/service/__init__.py +4 -0
- service_forge/api/routers/service/service_router.py +137 -0
- service_forge/api/routers/websocket/websocket_manager.py +83 -0
- service_forge/api/routers/websocket/websocket_router.py +78 -0
- service_forge/api/task_manager.py +141 -0
- service_forge/db/__init__.py +1 -0
- service_forge/db/database.py +240 -0
- service_forge/llm/__init__.py +62 -0
- service_forge/llm/llm.py +56 -0
- service_forge/model/__init__.py +0 -0
- service_forge/model/websocket.py +13 -0
- service_forge/proto/foo_input.py +5 -0
- service_forge/service.py +288 -0
- service_forge/service_config.py +158 -0
- service_forge/sft/cli.py +91 -0
- service_forge/sft/cmd/config_command.py +67 -0
- service_forge/sft/cmd/deploy_service.py +123 -0
- service_forge/sft/cmd/list_tars.py +41 -0
- service_forge/sft/cmd/service_command.py +149 -0
- service_forge/sft/cmd/upload_service.py +36 -0
- service_forge/sft/config/injector.py +119 -0
- service_forge/sft/config/injector_default_files.py +131 -0
- service_forge/sft/config/sf_metadata.py +30 -0
- service_forge/sft/config/sft_config.py +153 -0
- service_forge/sft/file/__init__.py +0 -0
- service_forge/sft/file/ignore_pattern.py +80 -0
- service_forge/sft/file/sft_file_manager.py +107 -0
- service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
- service_forge/sft/util/assert_util.py +25 -0
- service_forge/sft/util/logger.py +16 -0
- service_forge/sft/util/name_util.py +8 -0
- service_forge/sft/util/yaml_utils.py +57 -0
- service_forge/utils/__init__.py +0 -0
- service_forge/utils/default_type_converter.py +12 -0
- service_forge/utils/register.py +39 -0
- service_forge/utils/type_converter.py +99 -0
- service_forge/utils/workflow_clone.py +124 -0
- service_forge/workflow/__init__.py +1 -0
- service_forge/workflow/context.py +14 -0
- service_forge/workflow/edge.py +24 -0
- service_forge/workflow/node.py +184 -0
- service_forge/workflow/nodes/__init__.py +8 -0
- service_forge/workflow/nodes/control/if_node.py +29 -0
- service_forge/workflow/nodes/control/switch_node.py +28 -0
- service_forge/workflow/nodes/input/console_input_node.py +26 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
- service_forge/workflow/nodes/nested/workflow_node.py +28 -0
- service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
- service_forge/workflow/nodes/output/print_node.py +29 -0
- service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
- service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
- service_forge/workflow/port.py +89 -0
- service_forge/workflow/trigger.py +24 -0
- service_forge/workflow/triggers/__init__.py +6 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +255 -0
- service_forge/workflow/triggers/fast_api_trigger.py +169 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +44 -0
- service_forge/workflow/triggers/once_trigger.py +20 -0
- service_forge/workflow/triggers/period_trigger.py +26 -0
- service_forge/workflow/triggers/websocket_api_trigger.py +184 -0
- service_forge/workflow/workflow.py +210 -0
- service_forge/workflow/workflow_callback.py +141 -0
- service_forge/workflow/workflow_event.py +15 -0
- service_forge/workflow/workflow_factory.py +246 -0
- service_forge/workflow/workflow_group.py +27 -0
- service_forge/workflow/workflow_type.py +52 -0
- service_forge-0.1.11.dist-info/METADATA +98 -0
- service_forge-0.1.11.dist-info/RECORD +75 -0
- service_forge-0.1.11.dist-info/WHEEL +4 -0
- service_forge-0.1.11.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from abc import abstractmethod
|
|
5
|
+
from typing import TYPE_CHECKING, Any
|
|
6
|
+
from typing_extensions import override
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from .workflow_event import WorkflowResult
|
|
9
|
+
from loguru import logger
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from .node import Node
|
|
13
|
+
from .workflow import Workflow
|
|
14
|
+
|
|
15
|
+
class CallbackEvent(Enum):
    """Lifecycle events that a WorkflowCallback can be notified about.

    Values are the snake_case event names used on the wire / in logs.
    """
    ON_WORKFLOW_START = "on_workflow_start"
    ON_WORKFLOW_END = "on_workflow_end"
    ON_WORKFLOW_ERROR = "on_workflow_error"
    ON_NODE_START = "on_node_start"
    ON_NODE_END = "on_node_end"
    # NOTE(review): ON_NODE_OUTPUT has no matching hook on WorkflowCallback
    # in this module — confirm whether it is dispatched elsewhere.
    ON_NODE_OUTPUT = "on_node_output"
    ON_NODE_STREAM_OUTPUT = "on_node_stream_output"
|
|
23
|
+
|
|
24
|
+
class WorkflowCallback:
    """Observer interface for workflow and node lifecycle events.

    All hooks are async and marked abstract; concrete callbacks subclass
    this and implement the hooks they care about.
    """

    @abstractmethod
    async def on_workflow_start(self, workflow: Workflow) -> None:
        """Invoked once when the workflow begins executing."""

    @abstractmethod
    async def on_workflow_end(self, workflow: Workflow, output: Any) -> None:
        """Invoked once with the final output when the workflow completes."""

    @abstractmethod
    async def on_workflow_error(self, workflow: Workflow, error: Any) -> None:
        """Invoked when the workflow terminates with an error."""

    @abstractmethod
    async def on_node_start(self, node: Node) -> None:
        """Invoked each time a node starts running."""

    @abstractmethod
    async def on_node_end(self, node: Node) -> None:
        """Invoked each time a node finishes running."""

    @abstractmethod
    async def on_node_stream_output(self, node: Node, output: Any) -> None:
        """Invoked for every streamed output chunk a node produces."""
|
|
48
|
+
|
|
49
|
+
class BuiltinWorkflowCallback(WorkflowCallback):
    """Default callback attached to every workflow built by the factory.

    Forwards terminal results and per-node stream chunks to the trigger
    node's per-task queues, and mirrors each event to websocket clients
    subscribed to the task. Websocket failures are logged, never raised.
    """

    def __init__(self):
        # Resolved lazily in _get_websocket_manager to avoid a circular
        # import at module load time.
        self._websocket_manager = None

    def _get_websocket_manager(self):
        """Return the shared websocket manager, importing it on first use."""
        if self._websocket_manager is None:
            from service_forge.api.routers.websocket.websocket_manager import websocket_manager
            self._websocket_manager = websocket_manager
        return self._websocket_manager

    def _serialize_result(self, result: Any) -> Any:
        """Return *result* unchanged if JSON-serializable, else its str() form."""
        try:
            json.dumps(result)
            return result
        except (TypeError, ValueError):
            return str(result)

    def _push_result(self, workflow: Workflow, workflow_result: WorkflowResult, to_result_queue: bool) -> None:
        """Push *workflow_result* onto the trigger node's queues for this task.

        The stream queue (when registered for the task) always receives the
        result; the result queue only when *to_result_queue* is True, i.e.
        for terminal end/error results rather than stream chunks.
        """
        trigger = workflow.real_trigger_node
        task_id = workflow.task_id
        if to_result_queue and task_id in trigger.result_queues:
            trigger.result_queues[task_id].put_nowait(workflow_result)
        if task_id in trigger.stream_queues:
            trigger.stream_queues[task_id].put_nowait(workflow_result)

    @override
    async def on_workflow_start(self, workflow: Workflow) -> None:
        ...

    @override
    async def on_workflow_end(self, workflow: Workflow, output: Any) -> None:
        """Deliver the final output to queues and notify websocket clients."""
        self._push_result(workflow, WorkflowResult(result=output, is_end=True, is_error=False), True)

        try:
            manager = self._get_websocket_manager()
            message = {
                "type": "workflow_end",
                "task_id": str(workflow.task_id),
                "result": self._serialize_result(output),
                "is_end": True,
                "is_error": False
            }
            await manager.send_to_task(workflow.task_id, message)
        except Exception as e:
            logger.error(f"发送 workflow_end 消息到 websocket 失败: {e}")

    @override
    async def on_workflow_error(self, workflow: Workflow, error: Any) -> None:
        """Deliver the error to queues and notify websocket clients."""
        self._push_result(workflow, WorkflowResult(result=error, is_end=False, is_error=True), True)

        try:
            manager = self._get_websocket_manager()
            message = {
                "type": "workflow_error",
                "task_id": str(workflow.task_id),
                "error": self._serialize_result(error),
                "is_end": False,
                "is_error": True
            }
            await manager.send_to_task(workflow.task_id, message)
        except Exception as e:
            logger.error(f"发送 workflow_error 消息到 websocket 失败: {e}")

    @override
    async def on_node_start(self, node: Node) -> None:
        ...

    @override
    async def on_node_end(self, node: Node) -> None:
        ...

    @override
    async def on_node_stream_output(self, node: Node, output: Any) -> None:
        """Forward a stream chunk to the stream queue and websocket clients only."""
        self._push_result(node.workflow, WorkflowResult(result=output, is_end=False, is_error=False), False)

        try:
            manager = self._get_websocket_manager()
            message = {
                "type": "node_stream_output",
                "task_id": str(node.workflow.task_id),
                "node": node.name,
                "output": self._serialize_result(output),
                "is_end": False,
                "is_error": False
            }
            await manager.send_to_task(node.workflow.task_id, message)
        except Exception as e:
            logger.error(f"发送 node_stream_output 消息到 websocket 失败: {e}")
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
class WorkflowResult:
    """One result emitted by a workflow run.

    When ``is_end`` is True, ``result`` carries the value taken from the
    workflow's output port; otherwise it is an intermediate (stream or
    error) value.
    """

    def __init__(self, result: Any | None, is_end: bool, is_error: bool) -> None:
        self.result = result
        self.is_end = is_end
        self.is_error = is_error
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
from omegaconf import OmegaConf
|
|
2
|
+
from typing import Callable, Awaitable, AsyncIterator, Any
|
|
3
|
+
from copy import deepcopy
|
|
4
|
+
|
|
5
|
+
from service_forge.workflow.workflow_callback import BuiltinWorkflowCallback
|
|
6
|
+
from .workflow import Workflow
|
|
7
|
+
from .workflow_group import WorkflowGroup
|
|
8
|
+
from .node import Node
|
|
9
|
+
from .edge import Edge
|
|
10
|
+
from .port import Port, parse_port_name, create_workflow_input_port, create_sub_workflow_input_port
|
|
11
|
+
from .node import node_register
|
|
12
|
+
from .nodes import *
|
|
13
|
+
from .triggers import *
|
|
14
|
+
from .context import Context
|
|
15
|
+
from ..db.database import DatabaseManager
|
|
16
|
+
|
|
17
|
+
# Top-level keys recognized in a workflow config mapping.
WORKFLOW_KEY_NAME = 'name'
WORKFLOW_KEY_DESCRIPTION = 'description'
WORKFLOW_KEY_NODES = 'nodes'
WORKFLOW_KEY_INPUTS = 'inputs'
WORKFLOW_KEY_OUTPUTS = 'outputs'

# Keys recognized inside each entry of the 'nodes' list.
NODE_KEY_NAME = 'name'
NODE_KEY_TYPE = 'type'
NODE_KEY_ARGS = 'args'
NODE_KEY_OUTPUTS = 'outputs'
# NODE_KEY_INPUT_PORTS = 'input_ports'
# NODE_KEY_OUTPUT_PORTS = 'output_ports'
NODE_KEY_SUB_WORKFLOWS = 'sub_workflows'
NODE_KEY_SUB_WORKFLOWS_INPUT_PORTS = 'sub_workflows_input_ports'

# Keys recognized inside each port config entry (workflow inputs/outputs
# and sub-workflow input ports).
PORT_KEY_NAME = 'name'
PORT_KEY_PORT = 'port'
PORT_KEY_VALUE = 'value'
|
|
35
|
+
|
|
36
|
+
def parse_argument(arg: Any, service_env: dict[str, Any] = None) -> Any:
    """Resolve a node argument value from the workflow config.

    A string of the form ``<{KEY}>`` is treated as a reference into
    *service_env* and replaced by ``service_env[KEY]``; any other value
    (including non-strings) is returned unchanged.

    Raises:
        ValueError: if the argument references a key that is missing from
            *service_env*, or *service_env* was not provided at all.
    """
    # TODO: support import variables
    # isinstance (not ``type(arg) == str``) so str subclasses also resolve.
    if isinstance(arg, str) and arg.startswith('<{') and arg.endswith('}>'):
        key = arg[2:-2]
        # A missing/empty env mapping raises the same ValueError as a missing
        # key, instead of a confusing TypeError on ``key in None``.
        if not service_env or key not in service_env:
            raise ValueError(f"Key {key} not found in service env.")
        return service_env[key]
    return arg
|
|
44
|
+
|
|
45
|
+
def create_workflow(
    config_path: str = None,
    service_env: dict[str, Any] = None,
    config: dict = None,
    workflows: WorkflowGroup = None,
    _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] | None = None,
    _handle_query_user: Callable[[str, str], Awaitable[str]] | None = None,
    database_manager: DatabaseManager = None,
) -> Workflow:
    """Build a single Workflow from a config mapping or a YAML/OmegaConf file.

    Exactly one of *config* or *config_path* must be provided. Construction
    proceeds in phases: instantiate nodes (with ports, contexts, and any
    sub-workflows resolved from *workflows*), wire edges from node 'outputs',
    register workflow-level input/output ports, and finally fill node
    arguments — after edges, so extended ports created by wiring exist.

    Raises:
        ValueError: on a missing 'name' key, unknown node/port references,
            or when neither *config* nor *config_path* is given.
    """
    if config is None:
        if config_path is None:
            raise ValueError("Either config_path or config must be provided")
        config = OmegaConf.to_object(OmegaConf.load(config_path))

    if WORKFLOW_KEY_NAME not in config:
        # Error message names the config source that is actually known.
        if config_path is None:
            raise ValueError(f"{WORKFLOW_KEY_NAME} is required in workflow config in {config}.")
        else:
            raise ValueError(f"{WORKFLOW_KEY_NAME} is required in workflow config at {config_path}.")

    if WORKFLOW_KEY_DESCRIPTION not in config:
        config[WORKFLOW_KEY_DESCRIPTION] = ""

    workflow = Workflow(
        name = config[WORKFLOW_KEY_NAME],
        description = config[WORKFLOW_KEY_DESCRIPTION],
        nodes = [],
        input_ports = [],
        output_ports = [],
        _handle_stream_output = _handle_stream_output,
        _handle_query_user = _handle_query_user,
        database_manager = database_manager,
        # TODO: max_concurrent_runs
        callbacks = [BuiltinWorkflowCallback()],
    )

    # Nodes built so far, keyed by their config name (used for edge wiring).
    nodes: dict[str, Node] = {}

    # Nodes
    for node_config in config[WORKFLOW_KEY_NODES]:
        params = {
            "name": node_config[NODE_KEY_NAME],
        }

        # Instantiate by registered type name; 'type' itself is not a ctor arg.
        node: Node = node_register.instance(node_config[NODE_KEY_TYPE], ignore_keys=['type'], kwargs=params)

        # Context
        node.context = Context(variables = {})

        # Input ports — deep-copied so per-node state never aliases the
        # class-level defaults.
        node.input_ports = deepcopy(node.DEFAULT_INPUT_PORTS)
        for input_port in node.input_ports:
            input_port.node = node

        # Output ports
        node.output_ports = deepcopy(node.DEFAULT_OUTPUT_PORTS)
        for output_port in node.output_ports:
            output_port.node = node

        # Sub workflows — resolved by name from the enclosing group and
        # deep-copied so the nested run cannot mutate the shared instance.
        if node_key_sub_workflows := node_config.get(NODE_KEY_SUB_WORKFLOWS, None):
            sub_workflows: WorkflowGroup = WorkflowGroup(workflows=[])
            for sub_workflow_config in node_key_sub_workflows:
                sub_workflow = workflows.get_workflow(sub_workflow_config['name'])
                sub_workflows.add_workflow(deepcopy(sub_workflow))
            node.sub_workflows = sub_workflows

        # Sub workflows input ports — expose a sub-workflow's input port as
        # an input port of this node.
        if node_key_sub_network_input_ports := node_config.get(NODE_KEY_SUB_WORKFLOWS_INPUT_PORTS, None):
            for sub_workflow_input_port_config in node_key_sub_network_input_ports:
                name = sub_workflow_input_port_config[PORT_KEY_NAME]
                sub_workflow_name, sub_workflow_port_name = parse_port_name(sub_workflow_input_port_config[PORT_KEY_PORT])
                sub_workflow = node.sub_workflows.get_workflow(sub_workflow_name)
                if sub_workflow is None:
                    raise ValueError(f"{sub_workflow_name} is not a valid sub workflow.")
                sub_workflow_port = sub_workflow.get_input_port_by_name(sub_workflow_port_name)
                if sub_workflow_port is None:
                    raise ValueError(f"{sub_workflow_port_name} is not a valid input port.")
                value = sub_workflow_input_port_config.get(PORT_KEY_VALUE, None)
                node.input_ports.append(create_sub_workflow_input_port(name=name, node=node, port=sub_workflow_port, value=value))

        # Sub workflows output ports
        ...

        # Hooks — fall back to the workflow's own query handler when the
        # caller did not supply one.
        if _handle_query_user is None:
            node.query_user = workflow.handle_query_user
        else:
            node.query_user = _handle_query_user

        nodes[node_config[NODE_KEY_NAME]] = node

    # Edges — second pass so every referenced node already exists.
    for node_config in config[WORKFLOW_KEY_NODES]:
        start_node = nodes[node_config[NODE_KEY_NAME]]
        if NODE_KEY_OUTPUTS in node_config and node_config[NODE_KEY_OUTPUTS]:
            for key, value in node_config[NODE_KEY_OUTPUTS].items():
                if value is None:
                    continue

                # A scalar target is shorthand for a single-element list.
                if type(value) is str:
                    value = [value]

                for edge_value in value:
                    end_node_name, end_port_name = parse_port_name(edge_value)
                    end_node = nodes[end_node_name]

                    # Lazily materialize ports not present in the defaults.
                    start_node.try_create_extended_output_port(key)
                    end_node.try_create_extended_input_port(end_port_name)

                    start_port = start_node.get_output_port_by_name(key)
                    end_port = end_node.get_input_port_by_name(end_port_name)

                    if start_port is None:
                        raise ValueError(f"{key} is not a valid output port.")
                    if end_port is None:
                        raise ValueError(f"{end_port_name} is not a valid input port.")

                    edge = Edge(start_node, end_node, start_port, end_port)

                    start_node.output_edges.append(edge)
                    end_node.input_edges.append(edge)

    workflow.add_nodes(list(nodes.values()))

    # Inputs — map workflow-level input names onto node input ports.
    if workflow_key_inputs := config.get(WORKFLOW_KEY_INPUTS, None):
        for port_config in workflow_key_inputs:
            name = port_config[PORT_KEY_NAME]
            node_name, node_port_name = parse_port_name(port_config[PORT_KEY_PORT])
            if node_name not in nodes:
                raise ValueError(f"{node_name} is not a valid node.")
            node = nodes[node_name]
            port = node.get_input_port_by_name(node_port_name)
            if port is None:
                raise ValueError(f"{node_port_name} is not a valid input port.")
            value = port_config.get(PORT_KEY_VALUE, None)
            workflow.input_ports.append(create_workflow_input_port(name=name, port=port, value=value))

    # Outputs — each workflow output is an edge from a node output port to a
    # synthetic workflow-level port (edge end node is None).
    if workflow_key_outputs := config.get(WORKFLOW_KEY_OUTPUTS, None):
        for port_config in workflow_key_outputs:
            name = port_config[PORT_KEY_NAME]
            node_name, node_port_name = parse_port_name(port_config[PORT_KEY_PORT])
            if node_name not in nodes:
                raise ValueError(f"{node_name} is not a valid node.")
            node = nodes[node_name]
            port = node.get_output_port_by_name(node_port_name)
            if port is None:
                raise ValueError(f"{node_port_name} is not a valid output port.")
            output_port = Port(name=name, type=Any, port=port)
            workflow.output_ports.append(output_port)
            edge = Edge(node, None, port, output_port)
            node.output_edges.append(edge)

    # Arguments — filled last, after all (possibly extended) ports exist.
    for node_config in config[WORKFLOW_KEY_NODES]:
        node = nodes[node_config[NODE_KEY_NAME]]
        # Arguments
        if node_key_args := node_config.get(NODE_KEY_ARGS, None):
            for key, value in node_key_args.items():
                node.fill_input_by_name(key, parse_argument(value, service_env=service_env))

    return workflow
|
|
208
|
+
|
|
209
|
+
def create_workflows(
    config_path: str = None,
    service_env: dict[str, Any] = None,
    config: dict = None,
    _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
    _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
    database_manager: DatabaseManager = None,
) -> WorkflowGroup:
    """Build a WorkflowGroup from a config holding one or many workflows.

    A config without a 'workflows' key is treated as a single workflow and
    wrapped in a group whose main workflow is that workflow; otherwise each
    entry under 'workflows' is built in order, with the growing group passed
    along so later workflows can reference earlier ones as sub-workflows.
    """
    WORKFLOW_KEY_WORKFLOWS = 'workflows'
    WORKFLOW_KEY_MAIN_WORKFLOW_NAME = 'main'

    if config is None:
        if config_path is None:
            raise ValueError("Either config_path or config must be provided")
        config = OmegaConf.to_object(OmegaConf.load(config_path))

    # Arguments forwarded unchanged to every create_workflow call.
    shared_kwargs = dict(
        service_env=service_env,
        _handle_stream_output=_handle_stream_output,
        _handle_query_user=_handle_query_user,
        database_manager=database_manager,
    )

    if WORKFLOW_KEY_WORKFLOWS not in config:
        single = create_workflow(
            config_path=config_path if config_path else None,
            config=config,
            **shared_kwargs,
        )
        return WorkflowGroup(workflows=[single], main_workflow_name=single.name)

    group = WorkflowGroup(workflows=[], main_workflow_name=config.get(WORKFLOW_KEY_MAIN_WORKFLOW_NAME, None))
    for entry in config[WORKFLOW_KEY_WORKFLOWS]:
        group.add_workflow(create_workflow(
            config=entry,
            workflows=group,
            **shared_kwargs,
        ))
    return group
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from service_forge.workflow.workflow import Workflow
|
|
2
|
+
|
|
3
|
+
class WorkflowGroup:
    """An ordered collection of workflows with one designated main entry."""

    def __init__(self, workflows: list[Workflow], main_workflow_name: str = "main") -> None:
        self.workflows = workflows
        self.main_workflow_name = main_workflow_name

    def add_workflow(self, workflow: Workflow) -> None:
        """Append *workflow* to the group."""
        self.workflows.append(workflow)

    def get_workflow(self, name: str) -> Workflow | None:
        """Return the first workflow whose name matches, or None if absent."""
        return next((wf for wf in self.workflows if wf.name == name), None)

    def get_main_workflow(self) -> Workflow:
        """Return the workflow registered under the main workflow name."""
        return self.get_workflow(self.main_workflow_name)

    async def run(self, name: str = None) -> None:
        """Run the named workflow, or the main workflow when *name* is None.

        Raises:
            ValueError: if no workflow with the resolved name exists.
        """
        target = self.get_main_workflow() if name is None else self.get_workflow(name)
        if target is None:
            raise ValueError(f"Workflow with name {name} not found in workflow group.")
        await target.run()
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import os
|
|
3
|
+
import importlib
|
|
4
|
+
import inspect
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Type, Any
|
|
7
|
+
|
|
8
|
+
from service_forge.utils.register import Register
|
|
9
|
+
|
|
10
|
+
class WorkflowType:
    """Pairs a registry name with a Python type used as a workflow payload.

    Subclasses are auto-registered in ``workflow_type_register`` under their
    class name via ``__init_subclass__``.
    """

    # Class names exempt from auto-registration (the base class itself).
    CLASS_NOT_REQUIRED_TO_REGISTER = ['WorkflowType']

    def __init__(self, name: str, type: type) -> None:
        # NOTE: ``type`` shadows the builtin; kept for caller compatibility.
        self.name = name
        self.type = type

    def __init_subclass__(cls) -> None:
        # Register the subclass itself (not an instance) under its class name.
        if cls.__name__ not in WorkflowType.CLASS_NOT_REQUIRED_TO_REGISTER:
            workflow_type_register.register(cls.__name__, cls)
        return super().__init_subclass__()
|
|
21
|
+
|
|
22
|
+
# Global registry mapping type names to WorkflowType entries (or subclasses).
workflow_type_register = Register[WorkflowType]()
|
|
23
|
+
|
|
24
|
+
def _load_proto_classes():
    """Import every module under service_forge/proto and register its classes.

    Each public class defined directly in a proto module (underscore-prefixed
    names and classes merely imported into the module are skipped) is wrapped
    in a WorkflowType and registered under the class name. A failing module is
    reported and skipped so one bad file does not block the rest.
    """
    # TODO: load from config
    proto_dir = Path(__file__).parent.parent / "proto"

    if not proto_dir.exists():
        return

    for py_file in proto_dir.glob("*.py"):
        if py_file.name == "__init__.py":
            continue

        module_path = f"service_forge.proto.{py_file.stem}"

        try:
            module = importlib.import_module(module_path)

            for name, obj in inspect.getmembers(module, inspect.isclass):
                # ``inspect.isclass`` already guarantees ``__bases__`` exists,
                # so the former hasattr check was redundant and is dropped.
                if not name.startswith('_') and obj.__module__ == module_path:
                    workflow_type_register.register(name, WorkflowType(name, obj))

        except Exception as e:
            # Best-effort loading: report and continue with the next module.
            print(f"Failed to load module {module_path}: {e}")
|
|
51
|
+
|
|
52
|
+
# Populate the registry from the proto package once, at import time.
_load_proto_classes()
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: service-forge
|
|
3
|
+
Version: 0.1.11
|
|
4
|
+
Summary: Add your description here
|
|
5
|
+
Author-email: euxcet <zcc.qwer@gmail.com>
|
|
6
|
+
Requires-Python: >=3.11
|
|
7
|
+
Requires-Dist: a2a-sdk>=0.3.22
|
|
8
|
+
Requires-Dist: aiokafka>=0.12.0
|
|
9
|
+
Requires-Dist: alembic>=1.17.0
|
|
10
|
+
Requires-Dist: asyncpg>=0.30.0
|
|
11
|
+
Requires-Dist: dotenv>=0.9.9
|
|
12
|
+
Requires-Dist: fastapi>=0.119.1
|
|
13
|
+
Requires-Dist: greenlet>=3.2.4
|
|
14
|
+
Requires-Dist: jinja2>=3.1.6
|
|
15
|
+
Requires-Dist: kubernetes>=28.0.0
|
|
16
|
+
Requires-Dist: loguru>=0.7.3
|
|
17
|
+
Requires-Dist: omegaconf>=2.3.0
|
|
18
|
+
Requires-Dist: openai>=2.3.0
|
|
19
|
+
Requires-Dist: protobuf>=6.33.1
|
|
20
|
+
Requires-Dist: psycopg2-binary>=2.9.11
|
|
21
|
+
Requires-Dist: pydantic>=2.12.0
|
|
22
|
+
Requires-Dist: pymongo>=4.15.5
|
|
23
|
+
Requires-Dist: pytest-asyncio>=1.2.0
|
|
24
|
+
Requires-Dist: pytest>=8.4.2
|
|
25
|
+
Requires-Dist: python-dotenv>=1.1.1
|
|
26
|
+
Requires-Dist: python-jose>=3.5.0
|
|
27
|
+
Requires-Dist: python-multipart>=0.0.20
|
|
28
|
+
Requires-Dist: redis>=7.1.0
|
|
29
|
+
Requires-Dist: requests>=2.32.5
|
|
30
|
+
Requires-Dist: restrictedpython>=8.0
|
|
31
|
+
Requires-Dist: rich>=13.0.0
|
|
32
|
+
Requires-Dist: sqlalchemy>=2.0.44
|
|
33
|
+
Requires-Dist: typer>=0.12.0
|
|
34
|
+
Requires-Dist: uvicorn>=0.38.0
|
|
35
|
+
Requires-Dist: websockets>=15.0.1
|
|
36
|
+
Description-Content-Type: text/markdown
|
|
37
|
+
|
|
38
|
+
# Service Forge
|
|
39
|
+
|
|
40
|
+
Automated service creation and maintenance tool.
|
|
41
|
+
|
|
42
|
+
## Install
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
pip install -e .
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## CLI Usage (sft)
|
|
49
|
+
|
|
50
|
+
Service Forge 提供了命令行工具 `sft` 用于服务管理。
|
|
51
|
+
|
|
52
|
+
### 服务上传和部署
|
|
53
|
+
|
|
54
|
+
```bash
|
|
55
|
+
# 上传服务(打包并上传到服务器)
|
|
56
|
+
sft upload [project_path]
|
|
57
|
+
|
|
58
|
+
# 列出本地已打包的服务包
|
|
59
|
+
sft list
|
|
60
|
+
|
|
61
|
+
# 部署服务(只在服务器上使用)
|
|
62
|
+
sft deploy <name> <version>
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
### 配置管理
|
|
66
|
+
|
|
67
|
+
```bash
|
|
68
|
+
# 列出所有配置项
|
|
69
|
+
sft config list
|
|
70
|
+
|
|
71
|
+
# 获取指定配置项的值
|
|
72
|
+
sft config get <key>
|
|
73
|
+
|
|
74
|
+
# 设置配置项的值
|
|
75
|
+
sft config set <key> <value>
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### 服务管理
|
|
79
|
+
|
|
80
|
+
```bash
|
|
81
|
+
# 列出所有服务
|
|
82
|
+
sft service list
|
|
83
|
+
|
|
84
|
+
# 删除服务(只在服务器上使用)
|
|
85
|
+
sft service delete <service_name> [--force, -f]
|
|
86
|
+
|
|
87
|
+
# 查看服务日志(只在服务器上使用)
|
|
88
|
+
sft service logs <service_name> [--container, -c] [--tail, -n] [--follow, -f] [--previous, -p]
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
## TODO
|
|
92
|
+
|
|
93
|
+
- [x] 多次 trigger 并行执行
|
|
94
|
+
- [x] 支持 websocket 来做 trigger、输入和输出
|
|
95
|
+
- [x] 优化 websocket 客户端映射和重连支持
|
|
96
|
+
- [x] 节点和 workflow 运行情况的回调函数
|
|
97
|
+
- [ ] 支持 a2a
|
|
98
|
+
- [ ] workflow 执行异常处理
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
service_forge/service.py,sha256=D4lrpCrohUaG1lM2YdN1sfDVU_7EU1E01zonbZyktnk,12288
|
|
2
|
+
service_forge/service_config.py,sha256=3r0uQeysCcAuiotQcs8zUhfNrKk1Aky2zIasbyGQLlc,5003
|
|
3
|
+
service_forge/api/deprecated_websocket_api.py,sha256=E36-fpUPxzMJ2YGlCPeqwRbryk2FMMbQD_pbb8k1FYI,3343
|
|
4
|
+
service_forge/api/deprecated_websocket_manager.py,sha256=Xiwg3zwXRVi63sXmVH-TgbpL2XH_djyLeo96STm4cNM,16757
|
|
5
|
+
service_forge/api/http_api.py,sha256=RePGdIpxxWj3ZDMgbxa7-tpCFuMmtgRefUpZXo6TBD0,5488
|
|
6
|
+
service_forge/api/http_api_doc.py,sha256=ASlxvsIiUzDcMhVoumRjt9CfEMbh0O1U4ZLC9eobLF8,20235
|
|
7
|
+
service_forge/api/kafka_api.py,sha256=PInx2ZzKJRON7EaJFWroXkiOt_UeZY7WE6qK03gq4ak,4599
|
|
8
|
+
service_forge/api/task_manager.py,sha256=9Lk-NV4cBnuv9b8V6GVLWJJ4MCiAwCp5TVAwmYgqXbs,5269
|
|
9
|
+
service_forge/api/routers/service/__init__.py,sha256=hnoXBAxOzqCxtDHwoFNd6F1Yg6oGR3EHZhzrwjg1Dq4,130
|
|
10
|
+
service_forge/api/routers/service/service_router.py,sha256=E6mPMVx6dOn515h1m125n--fmAjb1z5ZatZDYjp7iNQ,5381
|
|
11
|
+
service_forge/api/routers/websocket/websocket_manager.py,sha256=j1AFqzXQhZZyaLQwhvZefXAS-zCOPzLcRMDEuusv6V0,3605
|
|
12
|
+
service_forge/api/routers/websocket/websocket_router.py,sha256=V0B7eQP8toO94-WbTrGraadXD3qeZ9lnKFcxwx6kLgM,3777
|
|
13
|
+
service_forge/db/__init__.py,sha256=EWLhH8bYsMOvRF_YXF6FgL3irKA6GZeLxSGvWDRM6f8,85
|
|
14
|
+
service_forge/db/database.py,sha256=JrfBjvLZT7Op-rSqbRJgNDikVGVasQCmx3mzg_HtIxA,9850
|
|
15
|
+
service_forge/llm/__init__.py,sha256=9sB5uqp2F8AboYV4jAvOQfMkVMWM811sUxBahJJ-_fE,2868
|
|
16
|
+
service_forge/llm/llm.py,sha256=Sar99FkTPBJqkB6dwX81ww_hJp8cPYxlg7Go-zXPyg0,1865
|
|
17
|
+
service_forge/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
|
+
service_forge/model/websocket.py,sha256=YIUCW32sbHIEFPHjk5FiDM_rDe2aVD6OpzBQul2R5IM,267
|
|
19
|
+
service_forge/proto/foo_input.py,sha256=-POJZSIFrGdBGz7FqZZ03r5uztpc5Apin9A0Yxbk6YI,90
|
|
20
|
+
service_forge/sft/cli.py,sha256=stB_YPhZ7gAQeOxIq03-tLyl5VfU-gnRacAT05GSMis,2904
|
|
21
|
+
service_forge/sft/cmd/config_command.py,sha256=I9t2HG28S6lCXpExHyZUc47b_1yB3i51tCFVk5J6TTU,2382
|
|
22
|
+
service_forge/sft/cmd/deploy_service.py,sha256=5IYbCVI0Nlry1KXBhm9leJmr2bzUEXrSY-2BympLR0c,4686
|
|
23
|
+
service_forge/sft/cmd/list_tars.py,sha256=Z3zvu2JLb_wNbTwi5TZXL5cZ8PxYrKks9AxkOzoUd_Q,1380
|
|
24
|
+
service_forge/sft/cmd/service_command.py,sha256=69GMMN61KtuoEFuYzFJ74ivNt8RX8q0I6rbePfJfEwQ,5538
|
|
25
|
+
service_forge/sft/cmd/upload_service.py,sha256=86PvvJSXCZKH4BU6rLytuc45grX-sRnQnOHCo9zUaPY,1232
|
|
26
|
+
service_forge/sft/config/injector.py,sha256=T2MgQvrle6yDDDSFH8vQm6Yh_LbH-9C4nxAUA7QLrq8,5269
|
|
27
|
+
service_forge/sft/config/injector_default_files.py,sha256=aTMQ2Tla3wdpvdaD_5VP2X6oeZbI0X414FM9NbirnO4,2716
|
|
28
|
+
service_forge/sft/config/sf_metadata.py,sha256=Y9akhSCgOd11-oqRs3LIs8FL9pvWNw6hyy57fuFcBhc,866
|
|
29
|
+
service_forge/sft/config/sft_config.py,sha256=-Wmsl0b2SE97yyWnPdv8SKyorV3Yc-DbGeOPU4fTIDY,5346
|
|
30
|
+
service_forge/sft/file/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
31
|
+
service_forge/sft/file/ignore_pattern.py,sha256=UrVmR83wOx51XHFcZDTPp15dGYcvMTE5W1m07-SvHpw,2521
|
|
32
|
+
service_forge/sft/file/sft_file_manager.py,sha256=poIM77tZZg7vfwBdCsdQctBbCczVLQePdTwVINEABvE,4337
|
|
33
|
+
service_forge/sft/kubernetes/kubernetes_manager.py,sha256=IF2_X9U-k5Dx7EZuGrJ9lZ85ltbilrrZDfsl8qFyTu4,11339
|
|
34
|
+
service_forge/sft/util/assert_util.py,sha256=8HreVkOzs9_ClKiFqG4qsFn_yyDLo5uXYhYUPXlmDjM,828
|
|
35
|
+
service_forge/sft/util/logger.py,sha256=0Hi74IoxshE-wBgvBa2EZPXYj37tTrUYwlOBd9UMMMs,502
|
|
36
|
+
service_forge/sft/util/name_util.py,sha256=WSYHM6c7SZULXCFON7nmGqsvAPPs_wavd6QjCa4UbRQ,301
|
|
37
|
+
service_forge/sft/util/yaml_utils.py,sha256=9OhJNQlzj_C1NeQoUZVF8qpDovrE7RDWtNXe-H7tuNA,1703
|
|
38
|
+
service_forge/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
39
|
+
service_forge/utils/default_type_converter.py,sha256=CuUZpMATdTwgcV1M3lbK64znwmEG85Zt3y_QGXr9tYQ,625
|
|
40
|
+
service_forge/utils/register.py,sha256=nxiGQBCX238FoZZhsDoDdBMv_2QzeIZpM367HPNfaqM,874
|
|
41
|
+
service_forge/utils/type_converter.py,sha256=IRphYxyGA0ICwFvDMvqAnRnSUTpY2ZQXvTE5o99CKpo,3046
|
|
42
|
+
service_forge/utils/workflow_clone.py,sha256=rajbRrEuSkxATWn1-jZiYn_S9OypGMKUSpXayl7n0cM,4563
|
|
43
|
+
service_forge/workflow/__init__.py,sha256=9oh4qPyA33RugrUYRAlLmFtmQPUN2wxruFQE3omTJF8,49
|
|
44
|
+
service_forge/workflow/context.py,sha256=1PotSEN_l8Emd5p8_6mtXJngXGYd3NSbOs1EKHgvnlo,346
|
|
45
|
+
service_forge/workflow/edge.py,sha256=88Ex-9_dHAGD38OHgiqP0DrfxK0FrhvDAxThR3ilUi4,627
|
|
46
|
+
service_forge/workflow/node.py,sha256=jPzXuixmZBL6-_jjXikFZmierlu3SOBJOSdQxTyV0FY,7536
|
|
47
|
+
service_forge/workflow/port.py,sha256=JVj0JrnQeOWCsp7n48Cm03bfmO8r3V5oTSEsC-HTGPE,2967
|
|
48
|
+
service_forge/workflow/trigger.py,sha256=1XYY-zk_3ei53lA8UfBw6IQGfLkQDrSY4rhVrfhHEWA,796
|
|
49
|
+
service_forge/workflow/workflow.py,sha256=KzfPhbJLkoIb6wBQLTPthVjJ5lYXaMF2QhmA3rb7PTk,7939
|
|
50
|
+
service_forge/workflow/workflow_callback.py,sha256=S__F7s-7l5LgkIXcZMcG68qCyc8NgdWQX81F0hKWL1U,5135
|
|
51
|
+
service_forge/workflow/workflow_event.py,sha256=QG1VFJwUUF1bTKKPKvqBICnYxkBwpfYDEoAuxwQYhhE,371
|
|
52
|
+
service_forge/workflow/workflow_factory.py,sha256=ecaOI26KHdm2ho-niZcEnZpLacb3kML-6Pb4i9cDK1A,10202
|
|
53
|
+
service_forge/workflow/workflow_group.py,sha256=ZZJpK92JyKjrLflNY0MRis1WUcB4Pkgde-UxWZW-IiY,977
|
|
54
|
+
service_forge/workflow/workflow_type.py,sha256=zRc-gL2LBE-gOgTUCU5-VDWeGUzuQahkHIg98ipEvQg,1629
|
|
55
|
+
service_forge/workflow/nodes/__init__.py,sha256=AUOoFUAMgRwfLiHNkjnDnWToMSe2AeV5vJO3NCG1eLw,381
|
|
56
|
+
service_forge/workflow/nodes/control/if_node.py,sha256=fBRnSsz0SHW5Hcf4993Y1PsUOUt2Cg9I_zcduHUnMuI,643
|
|
57
|
+
service_forge/workflow/nodes/control/switch_node.py,sha256=27mxdcQijSawNM5Fx6LdC1MB66vAoLAAI74DQyfqPqI,708
|
|
58
|
+
service_forge/workflow/nodes/input/console_input_node.py,sha256=GhQjWRgYy3aRfYUfaEql-_Xi10cca75oz-vmSZkR54w,683
|
|
59
|
+
service_forge/workflow/nodes/llm/query_llm_node.py,sha256=uwcWO6Q-qAG16yJqEGLJau8YxcJBGsBJoGVUBQTBWZk,1155
|
|
60
|
+
service_forge/workflow/nodes/nested/workflow_node.py,sha256=h5NXhRCUGaoNmqMV2PXR6JNwTCM8MCRVA6ocOjoDzhs,747
|
|
61
|
+
service_forge/workflow/nodes/output/kafka_output_node.py,sha256=mC6qRMGsuwU6qXAfXA-0ZFZudrlwmgRYOJRULUrtH40,682
|
|
62
|
+
service_forge/workflow/nodes/output/print_node.py,sha256=OSgeRQOd3dq88a1plx30g9-VB793RbXnIa5X8MF9fCo,656
|
|
63
|
+
service_forge/workflow/nodes/test/if_console_input_node.py,sha256=CtKHkFqr8PN974_iGP2VSBmNpXZ-KumRHCpoRR5RyF8,956
|
|
64
|
+
service_forge/workflow/nodes/test/time_consuming_node.py,sha256=gB2qw2DdjRf82z1158u36nSnCHrheHaxscAzPRnXNyk,1813
|
|
65
|
+
service_forge/workflow/triggers/__init__.py,sha256=iQ0WEYu6JgL191Y9XslMhZ7jS7JO8bL3SZ9YqIw5LCM,269
|
|
66
|
+
service_forge/workflow/triggers/a2a_api_trigger.py,sha256=TCFNRKlj6rXjPU2pjTD0HCpYgs_o6KcwCat9byxvq1s,8520
|
|
67
|
+
service_forge/workflow/triggers/fast_api_trigger.py,sha256=uRlDAh5PmaLE3fGnXhs825HmLJ1oxCiQ93rIqe4orSc,6093
|
|
68
|
+
service_forge/workflow/triggers/kafka_api_trigger.py,sha256=jGnb4dkvh01Jy3CIOvgcQQ3RjsCZCGyXp2NxzCUrd6Y,1498
|
|
69
|
+
service_forge/workflow/triggers/once_trigger.py,sha256=X0MidqEDNATzYEQY_MwpwKEqgzBFn1lc9q8ci14Mpt8,508
|
|
70
|
+
service_forge/workflow/triggers/period_trigger.py,sha256=WE55k5ZpCmu666MPPQC5XSP_L_OzGlwt7v8mAnw4Qas,703
|
|
71
|
+
service_forge/workflow/triggers/websocket_api_trigger.py,sha256=a7rJpdEMcrkdsJ0FtFUJOeFf2sn93dEmH9hhvCj16CI,6731
|
|
72
|
+
service_forge-0.1.11.dist-info/METADATA,sha256=45d_RyoVahkhRu_vqCdrfRANTAKtEAWanK58eJie2GM,2308
|
|
73
|
+
service_forge-0.1.11.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
74
|
+
service_forge-0.1.11.dist-info/entry_points.txt,sha256=WHntHW7GAyKQUEeMcMvHDZ7_xAb0-cZeAK4iJeu9lm8,51
|
|
75
|
+
service_forge-0.1.11.dist-info/RECORD,,
|