service-forge 0.1.11__py3-none-any.whl → 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of service-forge might be problematic; review the file changes listed below for details.

Files changed (42)
  1. service_forge/api/http_api.py +4 -0
  2. service_forge/api/routers/feedback/feedback_router.py +148 -0
  3. service_forge/api/routers/service/service_router.py +22 -32
  4. service_forge/current_service.py +14 -0
  5. service_forge/db/database.py +46 -32
  6. service_forge/db/migrations/feedback_migration.py +154 -0
  7. service_forge/db/models/__init__.py +0 -0
  8. service_forge/db/models/feedback.py +33 -0
  9. service_forge/llm/__init__.py +5 -0
  10. service_forge/model/feedback.py +30 -0
  11. service_forge/service.py +118 -126
  12. service_forge/service_config.py +42 -156
  13. service_forge/sft/cli.py +39 -0
  14. service_forge/sft/cmd/remote_deploy.py +160 -0
  15. service_forge/sft/cmd/remote_list_tars.py +111 -0
  16. service_forge/sft/config/injector.py +46 -24
  17. service_forge/sft/config/injector_default_files.py +1 -1
  18. service_forge/sft/config/sft_config.py +55 -8
  19. service_forge/storage/__init__.py +5 -0
  20. service_forge/storage/feedback_storage.py +245 -0
  21. service_forge/utils/default_type_converter.py +1 -1
  22. service_forge/utils/type_converter.py +5 -0
  23. service_forge/utils/workflow_clone.py +3 -2
  24. service_forge/workflow/node.py +8 -0
  25. service_forge/workflow/nodes/llm/query_llm_node.py +1 -1
  26. service_forge/workflow/trigger.py +4 -0
  27. service_forge/workflow/triggers/a2a_api_trigger.py +2 -0
  28. service_forge/workflow/triggers/fast_api_trigger.py +32 -0
  29. service_forge/workflow/triggers/kafka_api_trigger.py +3 -0
  30. service_forge/workflow/triggers/once_trigger.py +4 -1
  31. service_forge/workflow/triggers/period_trigger.py +4 -1
  32. service_forge/workflow/triggers/websocket_api_trigger.py +15 -11
  33. service_forge/workflow/workflow.py +74 -31
  34. service_forge/workflow/workflow_callback.py +3 -2
  35. service_forge/workflow/workflow_config.py +66 -0
  36. service_forge/workflow/workflow_factory.py +86 -85
  37. service_forge/workflow/workflow_group.py +33 -9
  38. {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/METADATA +1 -1
  39. {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/RECORD +41 -31
  40. service_forge/api/routers/service/__init__.py +0 -4
  41. {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/WHEEL +0 -0
  42. {service_forge-0.1.11.dist-info → service_forge-0.1.24.dist-info}/entry_points.txt +0 -0
service_forge/service.py CHANGED
@@ -4,54 +4,43 @@ import os
4
4
  import asyncio
5
5
  import threading
6
6
  import uuid
7
- from omegaconf import OmegaConf
7
+ from loguru import logger
8
+ from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
8
9
  from service_forge.workflow.node import node_register
9
- from service_forge.workflow.workflow_factory import create_workflows
10
+ from service_forge.workflow.workflow_factory import create_workflow_group
10
11
  from service_forge.api.http_api import start_fastapi_server
11
12
  from service_forge.api.kafka_api import start_kafka_server
12
13
  from service_forge.db.database import DatabaseManager
13
- from loguru import logger
14
- from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
15
14
  from service_forge.api.http_api_doc import generate_service_http_api_doc
16
- from service_forge.api.routers.service.service_router import set_service
17
15
  from service_forge.sft.config.sf_metadata import SfMetadata
16
+ from service_forge.service_config import ServiceConfig
17
+ from service_forge.current_service import set_service
18
18
 
19
19
  if TYPE_CHECKING:
20
20
  from service_forge.workflow.workflow_group import WorkflowGroup
21
+ from service_forge.workflow.workflow import Workflow
21
22
 
22
23
  class Service:
23
24
  def __init__(
24
25
  self,
25
26
  metadata: SfMetadata,
27
+ config: ServiceConfig,
26
28
  config_path: str,
27
- workflow_config_paths: list[str],
28
- _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
29
- _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
30
- enable_http: bool = True,
31
- http_host: str = "0.0.0.0",
32
- http_port: int = 8000,
33
- enable_kafka: bool = True,
34
- kafka_host: str = "localhost",
35
- kafka_port: int = 9092,
36
29
  service_env: dict[str, Any] = None,
37
30
  database_manager: DatabaseManager = None,
31
+ _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
32
+ _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
38
33
  ) -> None:
39
34
  self.metadata = metadata
35
+ self.config = config
40
36
  self.config_path = config_path
41
- self.workflow_config_paths = workflow_config_paths
42
37
  self._handle_stream_output = _handle_stream_output
43
38
  self._handle_query_user = _handle_query_user
44
- self.enable_http = enable_http
45
- self.http_host = http_host
46
- self.http_port = http_port
47
- self.enable_kafka = enable_kafka
48
- self.kafka_host = kafka_host
49
- self.kafka_port = kafka_port
50
39
  self.service_env = {} if service_env is None else service_env
51
40
  self.database_manager = database_manager
52
41
  self.workflow_groups: list[WorkflowGroup] = []
53
42
  self.workflow_tasks: dict[str, asyncio.Task] = {} # workflow_name -> task mapping
54
- self.workflow_config_map: dict[str, str] = {} # workflow_name -> config_path mapping
43
+ self.workflow_config_map: dict[uuid.UUID, str] = {} # workflow_id -> config_path mapping
55
44
  self.fastapi_thread: threading.Thread | None = None
56
45
  self.fastapi_loop: asyncio.AbstractEventLoop | None = None
57
46
 
@@ -70,22 +59,24 @@ class Service:
70
59
  async def start(self):
71
60
  set_service(self)
72
61
 
73
- if self.enable_http:
74
- fastapi_task = asyncio.create_task(start_fastapi_server(self.http_host, self.http_port))
62
+ if self.config.enable_http:
63
+ fastapi_task = asyncio.create_task(start_fastapi_server(self.config.http_host, self.config.http_port))
75
64
  doc_task = asyncio.create_task(generate_service_http_api_doc(self))
76
65
  else:
77
66
  fastapi_task = None
78
67
  doc_task = None
79
- if self.enable_kafka:
80
- kafka_task = asyncio.create_task(start_kafka_server(f"{self.kafka_host}:{self.kafka_port}"))
68
+
69
+ if self.config.enable_kafka:
70
+ kafka_task = asyncio.create_task(start_kafka_server(f"{self.config.kafka_host}:{self.config.kafka_port}"))
81
71
  else:
82
72
  kafka_task = None
83
73
 
84
74
  workflow_tasks: list[asyncio.Task] = []
85
75
 
86
- for workflow_config_path in self.workflow_config_paths:
87
- workflow_group = create_workflows(
88
- self.parse_workflow_path(workflow_config_path),
76
+ for workflow_config_path in self.config.workflows:
77
+ logger.info(f"Loading workflow from {self.parse_workflow_path(workflow_config_path)}")
78
+ workflow_group = create_workflow_group(
79
+ config_path=self.parse_workflow_path(workflow_config_path),
89
80
  service_env=self.service_env,
90
81
  _handle_stream_output=self._handle_stream_output,
91
82
  _handle_query_user=self._handle_query_user,
@@ -95,7 +86,7 @@ class Service:
95
86
  main_workflow = workflow_group.get_main_workflow()
96
87
  task = asyncio.create_task(workflow_group.run())
97
88
  workflow_tasks.append(task)
98
- self.workflow_tasks[main_workflow.name] = task
89
+ self.workflow_tasks[main_workflow.id] = task
99
90
  self.workflow_config_map[main_workflow.name] = workflow_config_path
100
91
 
101
92
  try:
@@ -138,120 +129,123 @@ class Service:
138
129
  else:
139
130
  return os.path.join(os.path.dirname(self.config_path), workflow_config_path)
140
131
 
141
- def get_workflow_group_by_name(self, workflow_name: str) -> WorkflowGroup | None:
132
+ def get_workflow_group_by_name(self, workflow_name: str, workflow_version: str, allow_none: bool = True) -> WorkflowGroup | None:
142
133
  for workflow_group in self.workflow_groups:
143
- if workflow_group.get_workflow(workflow_name) is not None:
134
+ if workflow_group.get_workflow_by_name(workflow_name, workflow_version) is not None:
144
135
  return workflow_group
136
+ if not allow_none:
137
+ raise ValueError(f"Workflow group with name {workflow_name} and version {workflow_version} not found in service {self.name}")
138
+ return None
139
+
140
+ def get_workflow_group_by_id(self, workflow_id: str, allow_none: bool = True) -> WorkflowGroup | None:
141
+ for workflow_group in self.workflow_groups:
142
+ if workflow_group.get_workflow_by_id(workflow_id) is not None:
143
+ return workflow_group
144
+ if not allow_none:
145
+ raise ValueError(f"Workflow group with id {workflow_id} not found in service {self.name}")
145
146
  return None
146
-
147
- def trigger_workflow(self, workflow_name: str, trigger_name: str, **kwargs) -> uuid.UUID:
148
- workflow_group = self.get_workflow_group_by_name(workflow_name)
149
- if workflow_group is None:
150
- logger.error(f"Workflow {workflow_name} not found")
151
- return False
152
-
153
- workflow = workflow_group.get_main_workflow()
154
- if workflow is None:
155
- logger.error(f"Workflow {workflow_name} not found")
156
- return False
157
147
 
148
+ def trigger_workflow(self, workflow_group: WorkflowGroup, trigger_name: str, **kwargs) -> uuid.UUID:
149
+ workflow = workflow_group.get_main_workflow(allow_none=False)
158
150
  return workflow.trigger(trigger_name, **kwargs)
159
151
 
160
- async def start_workflow(self, workflow_name: str) -> bool:
161
- if workflow_name in self.workflow_tasks:
162
- task = self.workflow_tasks[workflow_name]
152
+ def trigger_workflow_by_name(self, workflow_name: str, workflow_version: str, trigger_name: str, **kwargs) -> uuid.UUID:
153
+ workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
154
+ return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
155
+
156
+ def trigger_workflow_by_id(self, workflow_id: str, trigger_name: str, **kwargs) -> uuid.UUID:
157
+ workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
158
+ return self.trigger_workflow(workflow_group, trigger_name, **kwargs)
159
+
160
+ def start_workflow(self, workflow_group: WorkflowGroup) -> bool:
161
+ workflow = workflow_group.get_main_workflow(allow_none=False)
162
+ if workflow.id in self.workflow_tasks:
163
+ task = self.workflow_tasks[workflow.id]
163
164
  if not task.done():
164
- logger.warning(f"Workflow {workflow_name} is already running")
165
+ logger.warning(f"Workflow {workflow.id} is already running")
165
166
  return False
166
- del self.workflow_tasks[workflow_name]
167
-
168
- workflow_group = self.get_workflow_group_by_name(workflow_name)
169
- if workflow_group is None:
170
- logger.error(f"Workflow {workflow_name} not found")
171
- return False
167
+ del self.workflow_tasks[workflow.id]
172
168
 
173
- task = asyncio.create_task(workflow_group.run(workflow_name))
174
- self.workflow_tasks[workflow_name] = task
175
- logger.info(f"Started workflow {workflow_name}")
169
+ task = asyncio.create_task(workflow_group.run())
170
+ self.workflow_tasks[workflow.id] = task
171
+ logger.info(f"Started workflow {workflow.id}")
176
172
  return True
173
+
174
+ def start_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
175
+ workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
176
+ return self.start_workflow(workflow_group)
177
177
 
178
- async def stop_workflow(self, workflow_name: str) -> bool:
179
- if workflow_name not in self.workflow_tasks:
180
- logger.warning(f"Workflow {workflow_name} is not running")
178
+ def start_workflow_by_id(self, workflow_id: str) -> bool:
179
+ workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
180
+ return self.start_workflow(workflow_group)
181
+
182
+ async def stop_workflow(self, workflow_group: WorkflowGroup) -> bool:
183
+ workflow = workflow_group.get_main_workflow(allow_none=False)
184
+ if workflow.id not in self.workflow_tasks:
185
+ logger.warning(f"Workflow {workflow.id} is not running")
181
186
  return False
182
-
183
- task = self.workflow_tasks[workflow_name]
187
+ task = self.workflow_tasks[workflow.id]
184
188
  if task.done():
185
- logger.warning(f"Workflow {workflow_name} is already stopped")
186
- del self.workflow_tasks[workflow_name]
189
+ logger.warning(f"Workflow {workflow.id} is already stopped")
190
+ del self.workflow_tasks[workflow.id]
187
191
  return False
188
-
189
192
  task.cancel()
193
+ await workflow.stop()
190
194
  try:
191
195
  await task
192
196
  except asyncio.CancelledError:
193
197
  pass
194
-
195
- del self.workflow_tasks[workflow_name]
196
- logger.info(f"Stopped workflow {workflow_name}")
198
+ del self.workflow_tasks[workflow.id]
199
+ logger.info(f"Stopped workflow {workflow.id}")
197
200
  return True
201
+
202
+ async def stop_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
203
+ workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
204
+ return await self.stop_workflow(workflow_group)
198
205
 
199
- async def load_workflow_from_config(self, config_path: str = None, config: dict = None, workflow_name: str = None) -> bool:
200
- try:
201
- if config is None:
202
- if config_path is None:
203
- raise ValueError("Either config_path or config must be provided")
204
- if os.path.isabs(config_path):
205
- full_path = config_path
206
- else:
207
- full_path = self.parse_workflow_path(config_path)
208
- workflow_group = create_workflows(
209
- config_path=full_path,
210
- service_env=self.service_env,
211
- _handle_stream_output=self._handle_stream_output,
212
- _handle_query_user=self._handle_query_user,
213
- database_manager=self.database_manager,
214
- )
215
- config_identifier = config_path
216
- else:
217
- workflow_group = create_workflows(
218
- config=config,
219
- service_env=self.service_env,
220
- _handle_stream_output=self._handle_stream_output,
221
- _handle_query_user=self._handle_query_user,
222
- database_manager=self.database_manager,
223
- )
224
- config_identifier = config_path if config_path else "config_dict"
225
-
226
- self.workflow_groups.append(workflow_group)
227
- main_workflow = workflow_group.get_main_workflow()
228
- actual_name = workflow_name if workflow_name else main_workflow.name
229
-
230
- if workflow_name and workflow_name != main_workflow.name:
231
- actual_name = main_workflow.name
232
-
233
- if actual_name in self.workflow_tasks:
234
- await self.stop_workflow(actual_name)
235
-
236
- task = asyncio.create_task(workflow_group.run(actual_name))
237
- self.workflow_tasks[actual_name] = task
238
- self.workflow_config_map[actual_name] = config_identifier
239
-
240
- logger.info(f"Loaded and started workflow {actual_name} from {config_identifier}")
241
- return True
242
- except Exception as e:
243
- logger.error(f"Failed to load workflow from {config_path or 'config_dict'}: {e}")
244
- return False
206
+ async def stop_workflow_by_id(self, workflow_id: str) -> bool:
207
+ workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
208
+ return await self.stop_workflow(workflow_group)
209
+
210
+ async def load_workflow_from_config(self, config_path: str = None, config: dict = None) -> uuid.UUID:
211
+ workflow_group = create_workflow_group(
212
+ config_path=config_path,
213
+ config=config,
214
+ service_env=self.service_env,
215
+ _handle_stream_output=self._handle_stream_output,
216
+ _handle_query_user=self._handle_query_user,
217
+ database_manager=self.database_manager,
218
+ )
219
+
220
+ for workflow in workflow_group.workflows:
221
+ existing_workflow_group = self.get_workflow_group_by_name(workflow.name, workflow.version)
222
+ if existing_workflow_group is not None:
223
+ raise ValueError(f"Workflow group with name {workflow.name} and version {workflow.version} already exists")
224
+
225
+ self.workflow_groups.append(workflow_group)
226
+ main_workflow = workflow_group.get_main_workflow()
227
+
228
+ if main_workflow.id in self.workflow_tasks:
229
+ await self.stop_workflow(workflow_group)
230
+
231
+ self.start_workflow(workflow_group)
232
+ return main_workflow.id
245
233
 
246
234
  def get_service_status(self) -> dict[str, Any]:
247
235
  workflow_statuses = []
248
236
  for workflow_group in self.workflow_groups:
249
237
  for workflow in workflow_group.workflows:
238
+ workflow_id = workflow.id
239
+ workflow_version = workflow.version
240
+ workflow_config = workflow.config
250
241
  workflow_name = workflow.name
251
- is_running = workflow_name in self.workflow_tasks and not self.workflow_tasks[workflow_name].done()
242
+ is_running = workflow_id in self.workflow_tasks and not self.workflow_tasks[workflow_id].done()
252
243
  config_path = self.workflow_config_map.get(workflow_name, "unknown")
253
244
  workflow_statuses.append({
254
245
  "name": workflow_name,
246
+ "id": workflow_id,
247
+ "version": workflow_version,
248
+ "config": workflow_config,
255
249
  "description": workflow.description,
256
250
  "status": "running" if is_running else "stopped",
257
251
  "config_path": config_path,
@@ -265,23 +259,21 @@ class Service:
265
259
  }
266
260
 
267
261
  @staticmethod
268
- def from_config(metadata, service_env: dict[str, Any] = None) -> Service:
269
- config = OmegaConf.to_object(OmegaConf.load(metadata.service_config))
262
+ def from_config(metadata: SfMetadata, service_env: dict[str, Any] = None, config: ServiceConfig = None) -> Service:
263
+ if config is not None:
264
+ config_path = None
265
+ else:
266
+ config_path = metadata.service_config
267
+ config = ServiceConfig.from_yaml_file(config_path)
270
268
  database_manager = DatabaseManager.from_config(config=config)
271
269
  return Service(
272
270
  metadata=metadata,
273
- config_path=metadata.service_config,
274
- workflow_config_paths=config.get('workflows', []),
275
- _handle_stream_output=None,
276
- _handle_query_user=None,
277
- enable_http=config.get('enable_http', True),
278
- http_host=config.get('http_host', '0.0.0.0'),
279
- http_port=config.get('http_port', 8000),
280
- enable_kafka=config.get('enable_kafka', True),
281
- kafka_host=config.get('kafka_host', 'localhost'),
282
- kafka_port=config.get('kafka_port', 9092),
271
+ config_path=config_path,
272
+ config=config,
283
273
  service_env=service_env,
284
274
  database_manager=database_manager,
275
+ _handle_stream_output=None,
276
+ _handle_query_user=None,
285
277
  )
286
278
 
287
279
  def create_service(config_path: str, name: str, version: str, service_env: dict[str, Any] = None) -> Service:
@@ -1,158 +1,44 @@
1
1
  from __future__ import annotations
2
2
 
3
- class ServiceDatabaseConfig:
4
- def __init__(
5
- self,
6
- name: str,
7
-
8
- postgres_user: str,
9
- postgres_password: str,
10
- postgres_host: str,
11
- postgres_port: int,
12
- postgres_db: str,
13
-
14
- mongo_host: str,
15
- mongo_port: int,
16
- mongo_user: str,
17
- mongo_password: str,
18
- mongo_db: str,
19
-
20
- redis_host: str,
21
- redis_port: int,
22
- redis_password: str,
23
- ) -> None:
24
- self.name = name
25
-
26
- self.postgres_user = postgres_user
27
- self.postgres_password = postgres_password
28
- self.postgres_host = postgres_host
29
- self.postgres_port = postgres_port
30
- self.postgres_db = postgres_db
31
-
32
- self.mongo_host = mongo_host
33
- self.mongo_port = mongo_port
34
- self.mongo_user = mongo_user
35
- self.mongo_password = mongo_password
36
- self.mongo_db = mongo_db
37
-
38
- self.redis_host = redis_host
39
- self.redis_port = redis_port
40
- self.redis_password = redis_password
41
-
42
- @staticmethod
43
- def from_dict(config: dict) -> ServiceDatabaseConfig:
44
- return ServiceDatabaseConfig(
45
- name=config['name'],
46
- postgres_user=config.get('postgres_user', None),
47
- postgres_password=config.get('postgres_password', None),
48
- postgres_host=config.get('postgres_host', None),
49
- postgres_port=config.get('postgres_port', None),
50
- postgres_db=config.get('postgres_db', None),
51
-
52
- mongo_host=config.get('mongo_host', None),
53
- mongo_port=config.get('mongo_port', None),
54
- mongo_user=config.get('mongo_user', None),
55
- mongo_password=config.get('mongo_password', None),
56
- mongo_db=config.get('mongo_db', None),
57
-
58
- redis_host=config.get('redis_host', None),
59
- redis_port=config.get('redis_port', None),
60
- redis_password=config.get('redis_password', None),
61
- )
62
-
63
- def to_dict(self) -> dict:
64
- return {
65
- 'name': self.name,
66
-
67
- 'postgres_user': self.postgres_user,
68
- 'postgres_password': self.postgres_password,
69
- 'postgres_host': self.postgres_host,
70
- 'postgres_port': self.postgres_port,
71
- 'postgres_db': self.postgres_db,
72
-
73
- 'mongo_host': self.mongo_host,
74
- 'mongo_port': self.mongo_port,
75
- 'mongo_user': self.mongo_user,
76
- 'mongo_password': self.mongo_password,
77
- 'mongo_db': self.mongo_db,
78
-
79
- 'redis_host': self.redis_host,
80
- 'redis_port': self.redis_port,
81
- 'redis_password': self.redis_password,
82
- }
83
-
84
- class ServiceConfig:
85
- def __init__(
86
- self,
87
- name: str,
88
- workflows: list[str],
89
- enable_http: bool,
90
- http_host: str,
91
- http_port: int,
92
- enable_kafka: bool,
93
- kafka_host: str,
94
- kafka_port: int,
95
- databases: list[ServiceDatabaseConfig],
96
- ) -> None:
97
- self.name = name
98
- self.workflows = workflows
99
- self.enable_http = enable_http
100
- self.http_host = http_host
101
- self.http_port = http_port
102
- self.enable_kafka = enable_kafka
103
- self.kafka_host = kafka_host
104
- self.kafka_port = kafka_port
105
- self.databases = databases
106
-
107
- @staticmethod
108
- def from_dict(config: dict) -> ServiceConfig:
109
- return ServiceConfig(
110
- name=config['name'],
111
- workflows=config['workflows'],
112
- enable_http=config['enable_http'],
113
- http_host=config['http_host'],
114
- http_port=config['http_port'],
115
- enable_kafka=config['enable_kafka'],
116
- kafka_host=config['kafka_host'],
117
- kafka_port=config['kafka_port'],
118
- databases=[ServiceDatabaseConfig.from_dict(database) for database in config['databases']],
119
- )
120
-
121
- def to_dict(self) -> dict:
122
- return {
123
- 'name': self.name,
124
- 'workflows': self.workflows,
125
- 'enable_http': self.enable_http,
126
- 'http_host': self.http_host,
127
- 'http_port': self.http_port,
128
- 'enable_kafka': self.enable_kafka,
129
- 'kafka_host': self.kafka_host,
130
- 'kafka_port': self.kafka_port,
131
- 'databases': [database.to_dict() for database in self.databases],
132
- }
133
-
134
- # name: tag_service
135
- # workflows:
136
- # # - ./workflow/kafka_workflow.yaml
137
- # - ./workflow/query_tags_workflow.yaml
138
- # - ./workflow/create_tag_workflow.yaml
139
- # - ./workflow/update_tag_workflow.yaml
140
- # - ./workflow/delete_tag_workflow.yaml
141
- # - ./workflow/get_tags_from_record.yaml
142
-
143
- # enable_http: true
144
- # enable_kafka: false
145
-
146
- # # Following configs will be auto-injected by sft.
147
- # http_host: 0.0.0.0
148
- # http_port: 37200
149
- # kafka_host: localhost
150
- # kafka_port: 9092
151
-
152
- # databases:
153
- # - name: tag
154
- # postgres_user: postgres
155
- # postgres_password: "gnBGWg7aL4"
156
- # postgres_host: second-brain-postgres-postgresql
157
- # postgres_port: 5432
158
- # postgres_db: tag-service-tag
3
+ import yaml
4
+ from pydantic import BaseModel
5
+
6
+ class ServiceFeedbackConfig(BaseModel):
7
+ api_url: str
8
+ api_timeout: int = 5
9
+
10
+ class ServiceDatabaseConfig(BaseModel):
11
+ name: str
12
+ postgres_user: str | None = None
13
+ postgres_password: str | None = None
14
+ postgres_host: str | None = None
15
+ postgres_port: int | None = None
16
+ postgres_db: str | None = None
17
+
18
+ mongo_host: str | None = None
19
+ mongo_port: int | None = None
20
+ mongo_user: str | None = None
21
+ mongo_password: str | None = None
22
+ mongo_db: str | None = None
23
+
24
+ redis_host: str | None = None
25
+ redis_port: int | None = None
26
+ redis_password: str | None = None
27
+
28
+ class ServiceConfig(BaseModel):
29
+ name: str
30
+ workflows: list[str]
31
+ enable_http: bool
32
+ http_host: str | None = None
33
+ http_port: int | None = None
34
+ enable_kafka: bool
35
+ kafka_host: str | None = None
36
+ kafka_port: int | None = None
37
+ databases: list[ServiceDatabaseConfig] | None = None
38
+ feedback: ServiceFeedbackConfig | None = None
39
+
40
+ @classmethod
41
+ def from_yaml_file(cls, filepath: str) -> ServiceConfig:
42
+ with open(filepath, 'r', encoding='utf-8') as f:
43
+ data = yaml.safe_load(f)
44
+ return cls(**data)
service_forge/sft/cli.py CHANGED
@@ -9,6 +9,8 @@ from service_forge.sft.cmd.upload_service import upload_service
9
9
  from service_forge.sft.cmd.deploy_service import deploy_service
10
10
  from service_forge.sft.cmd.config_command import list_config, get_config, set_config
11
11
  from service_forge.sft.cmd.service_command import list_services, delete_service, show_service_logs
12
+ from service_forge.sft.cmd.remote_list_tars import remote_list_tars
13
+ from service_forge.sft.cmd.remote_deploy import remote_deploy_tar, remote_list_and_deploy
12
14
 
13
15
  app = typer.Typer(
14
16
  name="sft",
@@ -33,6 +35,43 @@ def list_tars_command() -> None:
33
35
  def deploy_service_command(name: str, version: str) -> None:
34
36
  deploy_service(name, version)
35
37
 
38
+ @app.command(name="remote-list")
39
+ def remote_list_tars_command(
40
+ url: str = typer.Option(
41
+ None,
42
+ "--url",
43
+ "-u",
44
+ help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
45
+ )
46
+ ) -> None:
47
+ """List tar packages and their status on remote server"""
48
+ remote_list_tars(url)
49
+
50
+ @app.command(name="remote-deploy")
51
+ def remote_deploy_command(
52
+ filename: str = typer.Argument(help="Filename of the tar package to deploy"),
53
+ url: str = typer.Option(
54
+ None,
55
+ "--url",
56
+ "-u",
57
+ help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
58
+ )
59
+ ) -> None:
60
+ """Remote deploy specified tar package"""
61
+ remote_deploy_tar(filename, url)
62
+
63
+ @app.command(name="remote-deploy-interactive")
64
+ def remote_deploy_interactive_command(
65
+ url: str = typer.Option(
66
+ None,
67
+ "--url",
68
+ "-u",
69
+ help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
70
+ )
71
+ ) -> None:
72
+ """Interactive remote deployment of tar packages (list available packages first, then select for deployment)"""
73
+ remote_list_and_deploy(url)
74
+
36
75
  config_app = typer.Typer(
37
76
  name="config",
38
77
  help="Configuration management commands",