service-forge 0.1.11-py3-none-any.whl → 0.1.21-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of service-forge might be problematic.
- service_forge/api/http_api.py +4 -0
- service_forge/api/routers/feedback/feedback_router.py +148 -0
- service_forge/api/routers/service/service_router.py +22 -32
- service_forge/current_service.py +14 -0
- service_forge/db/database.py +29 -32
- service_forge/db/migrations/feedback_migration.py +154 -0
- service_forge/db/models/__init__.py +0 -0
- service_forge/db/models/feedback.py +33 -0
- service_forge/llm/__init__.py +5 -0
- service_forge/model/feedback.py +30 -0
- service_forge/service.py +118 -126
- service_forge/service_config.py +42 -156
- service_forge/sft/config/injector.py +33 -23
- service_forge/sft/config/sft_config.py +55 -8
- service_forge/storage/__init__.py +5 -0
- service_forge/storage/feedback_storage.py +245 -0
- service_forge/utils/workflow_clone.py +3 -2
- service_forge/workflow/node.py +8 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +1 -1
- service_forge/workflow/trigger.py +4 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +2 -0
- service_forge/workflow/triggers/fast_api_trigger.py +32 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +3 -0
- service_forge/workflow/triggers/once_trigger.py +4 -1
- service_forge/workflow/triggers/period_trigger.py +4 -1
- service_forge/workflow/triggers/websocket_api_trigger.py +15 -11
- service_forge/workflow/workflow.py +26 -4
- service_forge/workflow/workflow_config.py +66 -0
- service_forge/workflow/workflow_factory.py +86 -85
- service_forge/workflow/workflow_group.py +33 -9
- {service_forge-0.1.11.dist-info → service_forge-0.1.21.dist-info}/METADATA +1 -1
- {service_forge-0.1.11.dist-info → service_forge-0.1.21.dist-info}/RECORD +34 -26
- service_forge/api/routers/service/__init__.py +0 -4
- {service_forge-0.1.11.dist-info → service_forge-0.1.21.dist-info}/WHEEL +0 -0
- {service_forge-0.1.11.dist-info → service_forge-0.1.21.dist-info}/entry_points.txt +0 -0
service_forge/service.py
CHANGED
@@ -4,54 +4,43 @@ import os
 import asyncio
 import threading
 import uuid
-from …
+from loguru import logger
+from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
 from service_forge.workflow.node import node_register
-from service_forge.workflow.workflow_factory import …
+from service_forge.workflow.workflow_factory import create_workflow_group
 from service_forge.api.http_api import start_fastapi_server
 from service_forge.api.kafka_api import start_kafka_server
 from service_forge.db.database import DatabaseManager
-from loguru import logger
-from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
 from service_forge.api.http_api_doc import generate_service_http_api_doc
-from service_forge.api.routers.service.service_router import set_service
 from service_forge.sft.config.sf_metadata import SfMetadata
+from service_forge.service_config import ServiceConfig
+from service_forge.current_service import set_service
 
 if TYPE_CHECKING:
     from service_forge.workflow.workflow_group import WorkflowGroup
+    from service_forge.workflow.workflow import Workflow
 
 class Service:
     def __init__(
         self,
         metadata: SfMetadata,
+        config: ServiceConfig,
         config_path: str,
-        workflow_config_paths: list[str],
-        _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
-        _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
-        enable_http: bool = True,
-        http_host: str = "0.0.0.0",
-        http_port: int = 8000,
-        enable_kafka: bool = True,
-        kafka_host: str = "localhost",
-        kafka_port: int = 9092,
         service_env: dict[str, Any] = None,
         database_manager: DatabaseManager = None,
+        _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
+        _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
     ) -> None:
         self.metadata = metadata
+        self.config = config
         self.config_path = config_path
-        self.workflow_config_paths = workflow_config_paths
         self._handle_stream_output = _handle_stream_output
         self._handle_query_user = _handle_query_user
-        self.enable_http = enable_http
-        self.http_host = http_host
-        self.http_port = http_port
-        self.enable_kafka = enable_kafka
-        self.kafka_host = kafka_host
-        self.kafka_port = kafka_port
         self.service_env = {} if service_env is None else service_env
         self.database_manager = database_manager
         self.workflow_groups: list[WorkflowGroup] = []
         self.workflow_tasks: dict[str, asyncio.Task] = {}  # workflow_name -> task mapping
-        self.workflow_config_map: dict[…
+        self.workflow_config_map: dict[uuid.UUID, str] = {}  # workflow_id -> config_path mapping
         self.fastapi_thread: threading.Thread | None = None
         self.fastapi_loop: asyncio.AbstractEventLoop | None = None
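Net effect of the hunk above: nine loose constructor parameters collapse into a single ServiceConfig object. A minimal construction sketch, assuming an SfMetadata instance named metadata is already in scope; the field values are illustrative, borrowed from the example config that 0.1.11 carried as comments in service_config.py:

    from service_forge.service import Service
    from service_forge.service_config import ServiceConfig

    config = ServiceConfig(
        name="tag_service",
        workflows=["./workflow/query_tags_workflow.yaml"],
        enable_http=True,
        http_port=8000,
        enable_kafka=False,
    )
    # config_path still anchors relative workflow paths (see parse_workflow_path)
    service = Service(metadata=metadata, config=config, config_path="./service.yaml")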
@@ -70,22 +59,24 @@ class Service:
     async def start(self):
         set_service(self)
 
-        if self.enable_http:
-            fastapi_task = asyncio.create_task(start_fastapi_server(self.http_host, self.http_port))
+        if self.config.enable_http:
+            fastapi_task = asyncio.create_task(start_fastapi_server(self.config.http_host, self.config.http_port))
             doc_task = asyncio.create_task(generate_service_http_api_doc(self))
         else:
             fastapi_task = None
             doc_task = None
-…
-…
+
+        if self.config.enable_kafka:
+            kafka_task = asyncio.create_task(start_kafka_server(f"{self.config.kafka_host}:{self.config.kafka_port}"))
         else:
             kafka_task = None
 
         workflow_tasks: list[asyncio.Task] = []
 
-        for workflow_config_path in self.…
-…
-…
+        for workflow_config_path in self.config.workflows:
+            logger.info(f"Loading workflow from {self.parse_workflow_path(workflow_config_path)}")
+            workflow_group = create_workflow_group(
+                config_path=self.parse_workflow_path(workflow_config_path),
                 service_env=self.service_env,
                 _handle_stream_output=self._handle_stream_output,
                 _handle_query_user=self._handle_query_user,
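start() now reads the HTTP and Kafka settings from self.config instead of instance attributes, and workflow paths come from config.workflows. A minimal driver, as a sketch (start() keeps serving until its tasks are cancelled):

    import asyncio

    asyncio.run(service.start())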
@@ -95,7 +86,7 @@ class Service:
             main_workflow = workflow_group.get_main_workflow()
             task = asyncio.create_task(workflow_group.run())
             workflow_tasks.append(task)
-            self.workflow_tasks[main_workflow.…
+            self.workflow_tasks[main_workflow.id] = task
             self.workflow_config_map[main_workflow.name] = workflow_config_path
 
         try:
@@ -138,120 +129,123 @@ class Service:
         else:
             return os.path.join(os.path.dirname(self.config_path), workflow_config_path)
 
-    def get_workflow_group_by_name(self, workflow_name: str) -> WorkflowGroup | None:
+    def get_workflow_group_by_name(self, workflow_name: str, workflow_version: str, allow_none: bool = True) -> WorkflowGroup | None:
         for workflow_group in self.workflow_groups:
-            if workflow_group.…
+            if workflow_group.get_workflow_by_name(workflow_name, workflow_version) is not None:
                 return workflow_group
+        if not allow_none:
+            raise ValueError(f"Workflow group with name {workflow_name} and version {workflow_version} not found in service {self.name}")
+        return None
+
+    def get_workflow_group_by_id(self, workflow_id: str, allow_none: bool = True) -> WorkflowGroup | None:
+        for workflow_group in self.workflow_groups:
+            if workflow_group.get_workflow_by_id(workflow_id) is not None:
+                return workflow_group
+        if not allow_none:
+            raise ValueError(f"Workflow group with id {workflow_id} not found in service {self.name}")
         return None
-
-    def trigger_workflow(self, workflow_name: str, trigger_name: str, **kwargs) -> uuid.UUID:
-        workflow_group = self.get_workflow_group_by_name(workflow_name)
-        if workflow_group is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
-
-        workflow = workflow_group.get_main_workflow()
-        if workflow is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
 
+    def trigger_workflow(self, workflow_group: WorkflowGroup, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
         return workflow.trigger(trigger_name, **kwargs)
 
-…
+    def trigger_workflow_by_name(self, workflow_name: str, workflow_version: str, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
+
+    def trigger_workflow_by_id(self, workflow_id: str, trigger_name: str, **kwargs) -> uuid.UUID:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
+
+    def start_workflow(self, workflow_group: WorkflowGroup) -> bool:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
+        if workflow.id in self.workflow_tasks:
+            task = self.workflow_tasks[workflow.id]
             if not task.done():
-                logger.warning(f"Workflow {…
+                logger.warning(f"Workflow {workflow.id} is already running")
                 return False
-            del self.workflow_tasks[…
-
-        workflow_group = self.get_workflow_group_by_name(workflow_name)
-        if workflow_group is None:
-            logger.error(f"Workflow {workflow_name} not found")
-            return False
+            del self.workflow_tasks[workflow.id]
 
-        task = asyncio.create_task(workflow_group.run(…
-        self.workflow_tasks[…
-        logger.info(f"Started workflow {…
+        task = asyncio.create_task(workflow_group.run())
+        self.workflow_tasks[workflow.id] = task
+        logger.info(f"Started workflow {workflow.id}")
         return True
+
+    def start_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return self.start_workflow(workflow_group)
 
-…
+    def start_workflow_by_id(self, workflow_id: str) -> bool:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return self.start_workflow(workflow_group)
+
+    async def stop_workflow(self, workflow_group: WorkflowGroup) -> bool:
+        workflow = workflow_group.get_main_workflow(allow_none=False)
+        if workflow.id not in self.workflow_tasks:
+            logger.warning(f"Workflow {workflow.id} is not running")
             return False
-
-        task = self.workflow_tasks[workflow_name]
+        task = self.workflow_tasks[workflow.id]
         if task.done():
-            logger.warning(f"Workflow {…
-            del self.workflow_tasks[…
+            logger.warning(f"Workflow {workflow.id} is already stopped")
+            del self.workflow_tasks[workflow.id]
             return False
-
         task.cancel()
+        await workflow.stop()
         try:
             await task
         except asyncio.CancelledError:
             pass
-
-
-        logger.info(f"Stopped workflow {workflow_name}")
+        del self.workflow_tasks[workflow.id]
+        logger.info(f"Stopped workflow {workflow.id}")
         return True
+
+    async def stop_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
+        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
+        return await self.stop_workflow(workflow_group)
 
-    async def …
-…
-            self.workflow_groups.append(workflow_group)
-            main_workflow = workflow_group.get_main_workflow()
-            actual_name = workflow_name if workflow_name else main_workflow.name
-
-            if workflow_name and workflow_name != main_workflow.name:
-                actual_name = main_workflow.name
-
-            if actual_name in self.workflow_tasks:
-                await self.stop_workflow(actual_name)
-
-            task = asyncio.create_task(workflow_group.run(actual_name))
-            self.workflow_tasks[actual_name] = task
-            self.workflow_config_map[actual_name] = config_identifier
-
-            logger.info(f"Loaded and started workflow {actual_name} from {config_identifier}")
-            return True
-        except Exception as e:
-            logger.error(f"Failed to load workflow from {config_path or 'config_dict'}: {e}")
-            return False
+    async def stop_workflow_by_id(self, workflow_id: str) -> bool:
+        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
+        return await self.stop_workflow(workflow_group)
+
+    async def load_workflow_from_config(self, config_path: str = None, config: dict = None) -> uuid.UUID:
+        workflow_group = create_workflow_group(
+            config_path=config_path,
+            config=config,
+            service_env=self.service_env,
+            _handle_stream_output=self._handle_stream_output,
+            _handle_query_user=self._handle_query_user,
+            database_manager=self.database_manager,
+        )
+
+        for workflow in workflow_group.workflows:
+            existing_workflow_group = self.get_workflow_group_by_name(workflow.name, workflow.version)
+            if existing_workflow_group is not None:
+                raise ValueError(f"Workflow group with name {workflow.name} and version {workflow.version} already exists")
+
+        self.workflow_groups.append(workflow_group)
+        main_workflow = workflow_group.get_main_workflow()
+
+        if main_workflow.id in self.workflow_tasks:
+            await self.stop_workflow(workflow_group)
+
+        self.start_workflow(workflow_group)
+        return main_workflow.id
 
     def get_service_status(self) -> dict[str, Any]:
         workflow_statuses = []
         for workflow_group in self.workflow_groups:
             for workflow in workflow_group.workflows:
+                workflow_id = workflow.id
+                workflow_version = workflow.version
+                workflow_config = workflow.config
                 workflow_name = workflow.name
-                is_running = …
+                is_running = workflow_id in self.workflow_tasks and not self.workflow_tasks[workflow_id].done()
                 config_path = self.workflow_config_map.get(workflow_name, "unknown")
                 workflow_statuses.append({
                     "name": workflow_name,
+                    "id": workflow_id,
+                    "version": workflow_version,
+                    "config": workflow_config,
                     "description": workflow.description,
                     "status": "running" if is_running else "stopped",
                     "config_path": config_path,
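The hunk above reworks addressing: every lifecycle operation now exists as a (name, version) variant and an id variant, both funneling into a WorkflowGroup-based core, and allow_none=False turns lookup misses into a ValueError instead of the old log-and-return-False paths. A usage sketch; the workflow name, version, and trigger name here are illustrative:

    # inside an async function
    service.start_workflow_by_name("query_tags_workflow", "0.1.0")
    run_id = service.trigger_workflow_by_name("query_tags_workflow", "0.1.0", "fast_api_trigger")
    await service.stop_workflow_by_name("query_tags_workflow", "0.1.0")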
@@ -265,23 +259,21 @@ class Service:
         }
 
     @staticmethod
-    def from_config(metadata, service_env: dict[str, Any] = None) -> Service:
-        config …
+    def from_config(metadata: SfMetadata, service_env: dict[str, Any] = None, config: ServiceConfig = None) -> Service:
+        if config is not None:
+            config_path = None
+        else:
+            config_path = metadata.service_config
+            config = ServiceConfig.from_yaml_file(config_path)
         database_manager = DatabaseManager.from_config(config=config)
         return Service(
             metadata=metadata,
-            config_path=…
-…
-            _handle_stream_output=None,
-            _handle_query_user=None,
-            enable_http=config.get('enable_http', True),
-            http_host=config.get('http_host', '0.0.0.0'),
-            http_port=config.get('http_port', 8000),
-            enable_kafka=config.get('enable_kafka', True),
-            kafka_host=config.get('kafka_host', 'localhost'),
-            kafka_port=config.get('kafka_port', 9092),
+            config_path=config_path,
+            config=config,
             service_env=service_env,
             database_manager=database_manager,
+            _handle_stream_output=None,
+            _handle_query_user=None,
         )
 
 def create_service(config_path: str, name: str, version: str, service_env: dict[str, Any] = None) -> Service:
service_forge/service_config.py
CHANGED
@@ -1,158 +1,44 @@
 from __future__ import annotations
 
-…
-        return …
-            name=config['name'],
-            postgres_user=config.get('postgres_user', None),
-            postgres_password=config.get('postgres_password', None),
-            postgres_host=config.get('postgres_host', None),
-            postgres_port=config.get('postgres_port', None),
-            postgres_db=config.get('postgres_db', None),
-
-            mongo_host=config.get('mongo_host', None),
-            mongo_port=config.get('mongo_port', None),
-            mongo_user=config.get('mongo_user', None),
-            mongo_password=config.get('mongo_password', None),
-            mongo_db=config.get('mongo_db', None),
-
-            redis_host=config.get('redis_host', None),
-            redis_port=config.get('redis_port', None),
-            redis_password=config.get('redis_password', None),
-        )
-
-    def to_dict(self) -> dict:
-        return {
-            'name': self.name,
-
-            'postgres_user': self.postgres_user,
-            'postgres_password': self.postgres_password,
-            'postgres_host': self.postgres_host,
-            'postgres_port': self.postgres_port,
-            'postgres_db': self.postgres_db,
-
-            'mongo_host': self.mongo_host,
-            'mongo_port': self.mongo_port,
-            'mongo_user': self.mongo_user,
-            'mongo_password': self.mongo_password,
-            'mongo_db': self.mongo_db,
-
-            'redis_host': self.redis_host,
-            'redis_port': self.redis_port,
-            'redis_password': self.redis_password,
-        }
-
-class ServiceConfig:
-    def __init__(
-        self,
-        name: str,
-        workflows: list[str],
-        enable_http: bool,
-        http_host: str,
-        http_port: int,
-        enable_kafka: bool,
-        kafka_host: str,
-        kafka_port: int,
-        databases: list[ServiceDatabaseConfig],
-    ) -> None:
-        self.name = name
-        self.workflows = workflows
-        self.enable_http = enable_http
-        self.http_host = http_host
-        self.http_port = http_port
-        self.enable_kafka = enable_kafka
-        self.kafka_host = kafka_host
-        self.kafka_port = kafka_port
-        self.databases = databases
-
-    @staticmethod
-    def from_dict(config: dict) -> ServiceConfig:
-        return ServiceConfig(
-            name=config['name'],
-            workflows=config['workflows'],
-            enable_http=config['enable_http'],
-            http_host=config['http_host'],
-            http_port=config['http_port'],
-            enable_kafka=config['enable_kafka'],
-            kafka_host=config['kafka_host'],
-            kafka_port=config['kafka_port'],
-            databases=[ServiceDatabaseConfig.from_dict(database) for database in config['databases']],
-        )
-
-    def to_dict(self) -> dict:
-        return {
-            'name': self.name,
-            'workflows': self.workflows,
-            'enable_http': self.enable_http,
-            'http_host': self.http_host,
-            'http_port': self.http_port,
-            'enable_kafka': self.enable_kafka,
-            'kafka_host': self.kafka_host,
-            'kafka_port': self.kafka_port,
-            'databases': [database.to_dict() for database in self.databases],
-        }
-
-# name: tag_service
-# workflows:
-# # - ./workflow/kafka_workflow.yaml
-# - ./workflow/query_tags_workflow.yaml
-# - ./workflow/create_tag_workflow.yaml
-# - ./workflow/update_tag_workflow.yaml
-# - ./workflow/delete_tag_workflow.yaml
-# - ./workflow/get_tags_from_record.yaml
-
-# enable_http: true
-# enable_kafka: false
-
-# # Following configs will be auto-injected by sft.
-# http_host: 0.0.0.0
-# http_port: 37200
-# kafka_host: localhost
-# kafka_port: 9092
-
-# databases:
-# - name: tag
-# postgres_user: postgres
-# postgres_password: "gnBGWg7aL4"
-# postgres_host: second-brain-postgres-postgresql
-# postgres_port: 5432
-# postgres_db: tag-service-tag
+import yaml
+from pydantic import BaseModel
+
+class ServiceFeedbackConfig(BaseModel):
+    api_url: str
+    api_timeout: int = 5
+
+class ServiceDatabaseConfig(BaseModel):
+    name: str
+    postgres_user: str | None = None
+    postgres_password: str | None = None
+    postgres_host: str | None = None
+    postgres_port: int | None = None
+    postgres_db: str | None = None
+
+    mongo_host: str | None = None
+    mongo_port: int | None = None
+    mongo_user: str | None = None
+    mongo_password: str | None = None
+    mongo_db: str | None = None
+
+    redis_host: str | None = None
+    redis_port: int | None = None
+    redis_password: str | None = None
+
+class ServiceConfig(BaseModel):
+    name: str
+    workflows: list[str]
+    enable_http: bool
+    http_host: str | None = None
+    http_port: int | None = None
+    enable_kafka: bool
+    kafka_host: str | None = None
+    kafka_port: int | None = None
+    databases: list[ServiceDatabaseConfig] | None = None
+    feedback: ServiceFeedbackConfig | None = None
+
+    @classmethod
+    def from_yaml_file(cls, filepath: str) -> ServiceConfig:
+        with open(filepath, 'r', encoding='utf-8') as f:
+            data = yaml.safe_load(f)
+        return cls(**data)
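For reference, a YAML file that satisfies the new pydantic model could look like the following. Names are reused from the example that shipped as comments in 0.1.11; the feedback block is new in this release and its values here are hypothetical:

    name: tag_service
    workflows:
      - ./workflow/query_tags_workflow.yaml
      - ./workflow/create_tag_workflow.yaml
    enable_http: true
    enable_kafka: false

    # auto-injected by sft (see the injector.py diff below)
    http_host: 0.0.0.0
    http_port: 37200
    kafka_host: localhost
    kafka_port: 9092

    databases:
      - name: tag
        postgres_user: postgres
        postgres_host: second-brain-postgres-postgresql
        postgres_port: 5432
        postgres_db: tag-service-tag

    feedback:
      api_url: http://feedback-api.local:8080
      api_timeout: 5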
service_forge/sft/config/injector.py
CHANGED

@@ -1,10 +1,10 @@
-…
+import yaml
 from pathlib import Path
 from service_forge.sft.util.logger import log_info, log_error
 from service_forge.sft.config.injector_default_files import *
 from service_forge.sft.config.sf_metadata import load_metadata
 from service_forge.sft.config.sft_config import sft_config
-from service_forge.service_config import ServiceConfig
+from service_forge.service_config import ServiceConfig, ServiceFeedbackConfig
 from service_forge.sft.util.name_util import get_service_name
 from service_forge.sft.util.yaml_utils import load_sf_metadata_as_string
@@ -60,31 +60,41 @@ class Injector:
 
     def inject_service_config(self) -> None:
         service_config_path = self.project_dir / Path(self.metadata.service_config)
-…
+
+        config = ServiceConfig.from_yaml_file(service_config_path)
 
         config.http_port = sft_config.inject_http_port
         config.kafka_host = sft_config.inject_kafka_host
         config.kafka_port = sft_config.inject_kafka_port
-…
-        database.postgres_host…
-…
-        database.mongo_host…
-…
-        database.redis_host…
-…
+        if config.databases is not None:
+            for database in config.databases:
+                if database.postgres_host is not None:
+                    database.postgres_host = sft_config.inject_postgres_host
+                    database.postgres_port = sft_config.inject_postgres_port
+                    database.postgres_user = sft_config.inject_postgres_user
+                    database.postgres_password = sft_config.inject_postgres_password
+                    database.postgres_db = self.service_name
+                if database.mongo_host is not None:
+                    database.mongo_host = sft_config.inject_mongo_host
+                    database.mongo_port = sft_config.inject_mongo_port
+                    database.mongo_user = sft_config.inject_mongo_user
+                    database.mongo_password = sft_config.inject_mongo_password
+                    database.mongo_db = sft_config.inject_mongo_db
+                if database.redis_host is not None:
+                    database.redis_host = sft_config.inject_redis_host
+                    database.redis_port = sft_config.inject_redis_port
+                    database.redis_password = sft_config.inject_redis_password
+        if config.feedback is not None:
+            config.feedback.api_url = sft_config.inject_feedback_api_url
+            config.feedback.api_timeout = sft_config.inject_feedback_api_timeout
+        else:
+            config.feedback = ServiceFeedbackConfig(
+                api_url=sft_config.inject_feedback_api_url,
+                api_timeout=sft_config.inject_feedback_api_timeout,
+            )
+
+        with open(service_config_path, "w", encoding="utf-8") as f:
+            yaml.dump(config.model_dump(), f, allow_unicode=True, indent=2)
 
     def inject_ingress(self) -> None:
         ingress_yaml = DEFAULT_TRAEFIK_INGRESS_YAML.format(