service-forge 0.1.11__py3-none-any.whl → 0.1.24__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of service-forge might be problematic.
- service_forge/api/http_api.py +4 -0
- service_forge/api/routers/feedback/feedback_router.py +148 -0
- service_forge/api/routers/service/service_router.py +22 -32
- service_forge/current_service.py +14 -0
- service_forge/db/database.py +46 -32
- service_forge/db/migrations/feedback_migration.py +154 -0
- service_forge/db/models/__init__.py +0 -0
- service_forge/db/models/feedback.py +33 -0
- service_forge/llm/__init__.py +5 -0
- service_forge/model/feedback.py +30 -0
- service_forge/service.py +118 -126
- service_forge/service_config.py +42 -156
- service_forge/sft/cli.py +39 -0
- service_forge/sft/cmd/remote_deploy.py +160 -0
- service_forge/sft/cmd/remote_list_tars.py +111 -0
- service_forge/sft/config/injector.py +46 -24
- service_forge/sft/config/injector_default_files.py +1 -1
- service_forge/sft/config/sft_config.py +55 -8
- service_forge/storage/__init__.py +5 -0
- service_forge/storage/feedback_storage.py +245 -0
- service_forge/utils/default_type_converter.py +1 -1
- service_forge/utils/type_converter.py +5 -0
- service_forge/utils/workflow_clone.py +3 -2
- service_forge/workflow/node.py +8 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +1 -1
- service_forge/workflow/trigger.py +4 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +2 -0
- service_forge/workflow/triggers/fast_api_trigger.py +32 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +3 -0
- service_forge/workflow/triggers/once_trigger.py +4 -1
- service_forge/workflow/triggers/period_trigger.py +4 -1
- service_forge/workflow/triggers/websocket_api_trigger.py +15 -11
- service_forge/workflow/workflow.py +74 -31
- service_forge/workflow/workflow_callback.py +3 -2
- service_forge/workflow/workflow_config.py +66 -0
- service_forge/workflow/workflow_factory.py +86 -85
- service_forge/workflow/workflow_group.py +33 -9
- {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/METADATA +1 -1
- {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/RECORD +41 -31
- service_forge/api/routers/service/__init__.py +0 -4
- {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/WHEEL +0 -0
- {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/entry_points.txt +0 -0
service_forge/service.py
CHANGED
@@ -4,54 +4,43 @@ import os
 import asyncio
 import threading
 import uuid
-from
+from loguru import logger
+from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
 from service_forge.workflow.node import node_register
-from service_forge.workflow.workflow_factory import
+from service_forge.workflow.workflow_factory import create_workflow_group
 from service_forge.api.http_api import start_fastapi_server
 from service_forge.api.kafka_api import start_kafka_server
 from service_forge.db.database import DatabaseManager
-from loguru import logger
-from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
 from service_forge.api.http_api_doc import generate_service_http_api_doc
-from service_forge.api.routers.service.service_router import set_service
 from service_forge.sft.config.sf_metadata import SfMetadata
+from service_forge.service_config import ServiceConfig
+from service_forge.current_service import set_service
 
 if TYPE_CHECKING:
     from service_forge.workflow.workflow_group import WorkflowGroup
+    from service_forge.workflow.workflow import Workflow
 
 class Service:
     def __init__(
         self,
         metadata: SfMetadata,
+        config: ServiceConfig,
         config_path: str,
-        workflow_config_paths: list[str],
-        _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
-        _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
-        enable_http: bool = True,
-        http_host: str = "0.0.0.0",
-        http_port: int = 8000,
-        enable_kafka: bool = True,
-        kafka_host: str = "localhost",
-        kafka_port: int = 9092,
         service_env: dict[str, Any] = None,
         database_manager: DatabaseManager = None,
+        _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
+        _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
     ) -> None:
         self.metadata = metadata
+        self.config = config
         self.config_path = config_path
-        self.workflow_config_paths = workflow_config_paths
         self._handle_stream_output = _handle_stream_output
         self._handle_query_user = _handle_query_user
-        self.enable_http = enable_http
-        self.http_host = http_host
-        self.http_port = http_port
-        self.enable_kafka = enable_kafka
-        self.kafka_host = kafka_host
-        self.kafka_port = kafka_port
         self.service_env = {} if service_env is None else service_env
         self.database_manager = database_manager
         self.workflow_groups: list[WorkflowGroup] = []
         self.workflow_tasks: dict[str, asyncio.Task] = {}  # workflow_name -> task mapping
-        self.workflow_config_map: dict[
+        self.workflow_config_map: dict[uuid.UUID, str] = {}  # workflow_id -> config_path mapping
         self.fastapi_thread: threading.Thread | None = None
         self.fastapi_loop: asyncio.AbstractEventLoop | None = None
 
@@ -70,22 +59,24 @@ class Service:
     async def start(self):
         set_service(self)
 
-        if self.enable_http:
-            fastapi_task = asyncio.create_task(start_fastapi_server(self.http_host, self.http_port))
+        if self.config.enable_http:
+            fastapi_task = asyncio.create_task(start_fastapi_server(self.config.http_host, self.config.http_port))
             doc_task = asyncio.create_task(generate_service_http_api_doc(self))
         else:
             fastapi_task = None
             doc_task = None
-
-
+
+        if self.config.enable_kafka:
+            kafka_task = asyncio.create_task(start_kafka_server(f"{self.config.kafka_host}:{self.config.kafka_port}"))
         else:
             kafka_task = None
 
         workflow_tasks: list[asyncio.Task] = []
 
-        for workflow_config_path in self.
-
-
+        for workflow_config_path in self.config.workflows:
+            logger.info(f"Loading workflow from {self.parse_workflow_path(workflow_config_path)}")
+            workflow_group = create_workflow_group(
+                config_path=self.parse_workflow_path(workflow_config_path),
                 service_env=self.service_env,
                 _handle_stream_output=self._handle_stream_output,
                 _handle_query_user=self._handle_query_user,
@@ -95,7 +86,7 @@ class Service:
             main_workflow = workflow_group.get_main_workflow()
             task = asyncio.create_task(workflow_group.run())
             workflow_tasks.append(task)
-            self.workflow_tasks[main_workflow.
+            self.workflow_tasks[main_workflow.id] = task
             self.workflow_config_map[main_workflow.name] = workflow_config_path
 
         try:
@@ -138,120 +129,123 @@ class Service:
         else:
             return os.path.join(os.path.dirname(self.config_path), workflow_config_path)
 
-    def get_workflow_group_by_name(self, workflow_name: str) -> WorkflowGroup | None:
+    def get_workflow_group_by_name(self, workflow_name: str, workflow_version: str, allow_none: bool = True) -> WorkflowGroup | None:
         for workflow_group in self.workflow_groups:
-            if workflow_group.
+            if workflow_group.get_workflow_by_name(workflow_name, workflow_version) is not None:
                 return workflow_group
+        if not allow_none:
+            raise ValueError(f"Workflow group with name {workflow_name} and version {workflow_version} not found in service {self.name}")
+        return None
+
+    def get_workflow_group_by_id(self, workflow_id: str, allow_none: bool = True) -> WorkflowGroup | None:
+        for workflow_group in self.workflow_groups:
+            if workflow_group.get_workflow_by_id(workflow_id) is not None:
+                return workflow_group
+        if not allow_none:
+            raise ValueError(f"Workflow group with id {workflow_id} not found in service {self.name}")
         return None
-
-    def trigger_workflow(self, workflow_name: str, trigger_name: str, **kwargs) -> uuid.UUID:
-        workflow_group = self.get_workflow_group_by_name(workflow_name)
-        if workflow_group is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
-
-        workflow = workflow_group.get_main_workflow()
-        if workflow is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
 
+    def trigger_workflow(self, workflow_group: WorkflowGroup, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
         return workflow.trigger(trigger_name, **kwargs)
 
-
-
-
+    def trigger_workflow_by_name(self, workflow_name: str, workflow_version: str, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
+
+    def trigger_workflow_by_id(self, workflow_id: str, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
+
+    def start_workflow(self, workflow_group: WorkflowGroup) -> bool:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
+        if workflow.id in self.workflow_tasks:
+            task = self.workflow_tasks[workflow.id]
             if not task.done():
-                logger.warning(f"Workflow {
+                logger.warning(f"Workflow {workflow.id} is already running")
                 return False
-            del self.workflow_tasks[
-
-        workflow_group = self.get_workflow_group_by_name(workflow_name)
-        if workflow_group is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
+            del self.workflow_tasks[workflow.id]
 
-        task = asyncio.create_task(workflow_group.run(
-        self.workflow_tasks[
-        logger.info(f"Started workflow {
+        task = asyncio.create_task(workflow_group.run())
+        self.workflow_tasks[workflow.id] = task
+        logger.info(f"Started workflow {workflow.id}")
         return True
+
+    def start_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return self.start_workflow(workflow_group)
 
-
-
-
+    def start_workflow_by_id(self, workflow_id: str) -> bool:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return self.start_workflow(workflow_group)
+
+    async def stop_workflow(self, workflow_group: WorkflowGroup) -> bool:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
+        if workflow.id not in self.workflow_tasks:
+            logger.warning(f"Workflow {workflow.id} is not running")
             return False
-
-        task = self.workflow_tasks[workflow_name]
+        task = self.workflow_tasks[workflow.id]
         if task.done():
-            logger.warning(f"Workflow {
-            del self.workflow_tasks[
+            logger.warning(f"Workflow {workflow.id} is already stopped")
+            del self.workflow_tasks[workflow.id]
             return False
-
         task.cancel()
+        await workflow.stop()
         try:
             await task
         except asyncio.CancelledError:
             pass
-
-
-        logger.info(f"Stopped workflow {workflow_name}")
+        del self.workflow_tasks[workflow.id]
+        logger.info(f"Stopped workflow {workflow.id}")
         return True
+
+    async def stop_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return await self.stop_workflow(workflow_group)
 
-    async def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        self.workflow_groups.append(workflow_group)
-        main_workflow = workflow_group.get_main_workflow()
-        actual_name = workflow_name if workflow_name else main_workflow.name
-
-        if workflow_name and workflow_name != main_workflow.name:
-            actual_name = main_workflow.name
-
-        if actual_name in self.workflow_tasks:
-            await self.stop_workflow(actual_name)
-
-        task = asyncio.create_task(workflow_group.run(actual_name))
-        self.workflow_tasks[actual_name] = task
-        self.workflow_config_map[actual_name] = config_identifier
-
-        logger.info(f"Loaded and started workflow {actual_name} from {config_identifier}")
-        return True
-        except Exception as e:
-            logger.error(f"Failed to load workflow from {config_path or 'config_dict'}: {e}")
-            return False
+    async def stop_workflow_by_id(self, workflow_id: str) -> bool:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return await self.stop_workflow(workflow_group)
+
+    async def load_workflow_from_config(self, config_path: str = None, config: dict = None) -> uuid.UUID:
+        workflow_group = create_workflow_group(
+            config_path=config_path,
+            config=config,
+            service_env=self.service_env,
+            _handle_stream_output=self._handle_stream_output,
+            _handle_query_user=self._handle_query_user,
+            database_manager=self.database_manager,
+        )
+
+        for workflow in workflow_group.workflows:
+            existing_workflow_group = self.get_workflow_group_by_name(workflow.name, workflow.version)
+            if existing_workflow_group is not None:
+                raise ValueError(f"Workflow group with name {workflow.name} and version {workflow.version} already exists")
+
+        self.workflow_groups.append(workflow_group)
+        main_workflow = workflow_group.get_main_workflow()
+
+        if main_workflow.id in self.workflow_tasks:
+            await self.stop_workflow(workflow_group)
+
+        self.start_workflow(workflow_group)
+        return main_workflow.id
 
     def get_service_status(self) -> dict[str, Any]:
         workflow_statuses = []
         for workflow_group in self.workflow_groups:
             for workflow in workflow_group.workflows:
+                workflow_id = workflow.id
+                workflow_version = workflow.version
+                workflow_config = workflow.config
                 workflow_name = workflow.name
-                is_running =
+                is_running = workflow_id in self.workflow_tasks and not self.workflow_tasks[workflow_id].done()
                 config_path = self.workflow_config_map.get(workflow_name, "unknown")
                 workflow_statuses.append({
                     "name": workflow_name,
+                    "id": workflow_id,
+                    "version": workflow_version,
+                    "config": workflow_config,
                     "description": workflow.description,
                     "status": "running" if is_running else "stopped",
                     "config_path": config_path,
@@ -265,23 +259,21 @@ class Service:
         }
 
     @staticmethod
-    def from_config(metadata, service_env: dict[str, Any] = None) -> Service:
-        config
+    def from_config(metadata: SfMetadata, service_env: dict[str, Any] = None, config: ServiceConfig = None) -> Service:
+        if config is not None:
+            config_path = None
+        else:
+            config_path = metadata.service_config
+            config = ServiceConfig.from_yaml_file(config_path)
         database_manager = DatabaseManager.from_config(config=config)
         return Service(
             metadata=metadata,
-            config_path=
-
-            _handle_stream_output=None,
-            _handle_query_user=None,
-            enable_http=config.get('enable_http', True),
-            http_host=config.get('http_host', '0.0.0.0'),
-            http_port=config.get('http_port', 8000),
-            enable_kafka=config.get('enable_kafka', True),
-            kafka_host=config.get('kafka_host', 'localhost'),
-            kafka_port=config.get('kafka_port', 9092),
+            config_path=config_path,
+            config=config,
             service_env=service_env,
             database_manager=database_manager,
+            _handle_stream_output=None,
+            _handle_query_user=None,
         )
 
 def create_service(config_path: str, name: str, version: str, service_env: dict[str, Any] = None) -> Service:
service_forge/service_config.py
CHANGED
@@ -1,158 +1,44 @@
 from __future__ import annotations
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        return
-            name=config['name'],
-            postgres_user=config.get('postgres_user', None),
-            postgres_password=config.get('postgres_password', None),
-            postgres_host=config.get('postgres_host', None),
-            postgres_port=config.get('postgres_port', None),
-            postgres_db=config.get('postgres_db', None),
-
-            mongo_host=config.get('mongo_host', None),
-            mongo_port=config.get('mongo_port', None),
-            mongo_user=config.get('mongo_user', None),
-            mongo_password=config.get('mongo_password', None),
-            mongo_db=config.get('mongo_db', None),
-
-            redis_host=config.get('redis_host', None),
-            redis_port=config.get('redis_port', None),
-            redis_password=config.get('redis_password', None),
-        )
-
-    def to_dict(self) -> dict:
-        return {
-            'name': self.name,
-
-            'postgres_user': self.postgres_user,
-            'postgres_password': self.postgres_password,
-            'postgres_host': self.postgres_host,
-            'postgres_port': self.postgres_port,
-            'postgres_db': self.postgres_db,
-
-            'mongo_host': self.mongo_host,
-            'mongo_port': self.mongo_port,
-            'mongo_user': self.mongo_user,
-            'mongo_password': self.mongo_password,
-            'mongo_db': self.mongo_db,
-
-            'redis_host': self.redis_host,
-            'redis_port': self.redis_port,
-            'redis_password': self.redis_password,
-        }
-
-class ServiceConfig:
-    def __init__(
-        self,
-        name: str,
-        workflows: list[str],
-        enable_http: bool,
-        http_host: str,
-        http_port: int,
-        enable_kafka: bool,
-        kafka_host: str,
-        kafka_port: int,
-        databases: list[ServiceDatabaseConfig],
-    ) -> None:
-        self.name = name
-        self.workflows = workflows
-        self.enable_http = enable_http
-        self.http_host = http_host
-        self.http_port = http_port
-        self.enable_kafka = enable_kafka
-        self.kafka_host = kafka_host
-        self.kafka_port = kafka_port
-        self.databases = databases
-
-    @staticmethod
-    def from_dict(config: dict) -> ServiceConfig:
-        return ServiceConfig(
-            name=config['name'],
-            workflows=config['workflows'],
-            enable_http=config['enable_http'],
-            http_host=config['http_host'],
-            http_port=config['http_port'],
-            enable_kafka=config['enable_kafka'],
-            kafka_host=config['kafka_host'],
-            kafka_port=config['kafka_port'],
-            databases=[ServiceDatabaseConfig.from_dict(database) for database in config['databases']],
-        )
-
-    def to_dict(self) -> dict:
-        return {
-            'name': self.name,
-            'workflows': self.workflows,
-            'enable_http': self.enable_http,
-            'http_host': self.http_host,
-            'http_port': self.http_port,
-            'enable_kafka': self.enable_kafka,
-            'kafka_host': self.kafka_host,
-            'kafka_port': self.kafka_port,
-            'databases': [database.to_dict() for database in self.databases],
-        }
-
-# name: tag_service
-# workflows:
-# #  - ./workflow/kafka_workflow.yaml
-#   - ./workflow/query_tags_workflow.yaml
-#   - ./workflow/create_tag_workflow.yaml
-#   - ./workflow/update_tag_workflow.yaml
-#   - ./workflow/delete_tag_workflow.yaml
-#   - ./workflow/get_tags_from_record.yaml
-
-# enable_http: true
-# enable_kafka: false
-
-# # Following configs will be auto-injected by sft.
-# http_host: 0.0.0.0
-# http_port: 37200
-# kafka_host: localhost
-# kafka_port: 9092
-
-# databases:
-#   - name: tag
-#     postgres_user: postgres
-#     postgres_password: "gnBGWg7aL4"
-#     postgres_host: second-brain-postgres-postgresql
-#     postgres_port: 5432
-#     postgres_db: tag-service-tag
+import yaml
+from pydantic import BaseModel
+
+class ServiceFeedbackConfig(BaseModel):
+    api_url: str
+    api_timeout: int = 5
+
+class ServiceDatabaseConfig(BaseModel):
+    name: str
+    postgres_user: str | None = None
+    postgres_password: str | None = None
+    postgres_host: str | None = None
+    postgres_port: int | None = None
+    postgres_db: str | None = None
+
+    mongo_host: str | None = None
+    mongo_port: int | None = None
+    mongo_user: str | None = None
+    mongo_password: str | None = None
+    mongo_db: str | None = None
+
+    redis_host: str | None = None
+    redis_port: int | None = None
+    redis_password: str | None = None
+
+class ServiceConfig(BaseModel):
+    name: str
+    workflows: list[str]
+    enable_http: bool
+    http_host: str | None = None
+    http_port: int | None = None
+    enable_kafka: bool
+    kafka_host: str | None = None
+    kafka_port: int | None = None
+    databases: list[ServiceDatabaseConfig] | None = None
+    feedback: ServiceFeedbackConfig | None = None
+
+    @classmethod
+    def from_yaml_file(cls, filepath: str) -> ServiceConfig:
+        with open(filepath, 'r', encoding='utf-8') as f:
+            data = yaml.safe_load(f)
+        return cls(**data)
service_forge/sft/cli.py
CHANGED
@@ -9,6 +9,8 @@ from service_forge.sft.cmd.upload_service import upload_service
 from service_forge.sft.cmd.deploy_service import deploy_service
 from service_forge.sft.cmd.config_command import list_config, get_config, set_config
 from service_forge.sft.cmd.service_command import list_services, delete_service, show_service_logs
+from service_forge.sft.cmd.remote_list_tars import remote_list_tars
+from service_forge.sft.cmd.remote_deploy import remote_deploy_tar, remote_list_and_deploy
 
 app = typer.Typer(
     name="sft",
@@ -33,6 +35,43 @@ def list_tars_command() -> None:
 def deploy_service_command(name: str, version: str) -> None:
     deploy_service(name, version)
 
+@app.command(name="remote-list")
+def remote_list_tars_command(
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """List tar packages and their status on remote server"""
+    remote_list_tars(url)
+
+@app.command(name="remote-deploy")
+def remote_deploy_command(
+    filename: str = typer.Argument(help="Filename of the tar package to deploy"),
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """Remote deploy specified tar package"""
+    remote_deploy_tar(filename, url)
+
+@app.command(name="remote-deploy-interactive")
+def remote_deploy_interactive_command(
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """Interactive remote deployment of tar packages (list available packages first, then select for deployment)"""
+    remote_list_and_deploy(url)
+
 config_app = typer.Typer(
     name="config",
     help="Configuration management commands",