service_forge-0.1.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of service-forge might be problematic.

Files changed (75)
  1. service_forge/api/deprecated_websocket_api.py +86 -0
  2. service_forge/api/deprecated_websocket_manager.py +425 -0
  3. service_forge/api/http_api.py +148 -0
  4. service_forge/api/http_api_doc.py +455 -0
  5. service_forge/api/kafka_api.py +126 -0
  6. service_forge/api/routers/service/__init__.py +4 -0
  7. service_forge/api/routers/service/service_router.py +137 -0
  8. service_forge/api/routers/websocket/websocket_manager.py +83 -0
  9. service_forge/api/routers/websocket/websocket_router.py +78 -0
  10. service_forge/api/task_manager.py +141 -0
  11. service_forge/db/__init__.py +1 -0
  12. service_forge/db/database.py +240 -0
  13. service_forge/llm/__init__.py +62 -0
  14. service_forge/llm/llm.py +56 -0
  15. service_forge/model/__init__.py +0 -0
  16. service_forge/model/websocket.py +13 -0
  17. service_forge/proto/foo_input.py +5 -0
  18. service_forge/service.py +288 -0
  19. service_forge/service_config.py +158 -0
  20. service_forge/sft/cli.py +91 -0
  21. service_forge/sft/cmd/config_command.py +67 -0
  22. service_forge/sft/cmd/deploy_service.py +123 -0
  23. service_forge/sft/cmd/list_tars.py +41 -0
  24. service_forge/sft/cmd/service_command.py +149 -0
  25. service_forge/sft/cmd/upload_service.py +36 -0
  26. service_forge/sft/config/injector.py +119 -0
  27. service_forge/sft/config/injector_default_files.py +131 -0
  28. service_forge/sft/config/sf_metadata.py +30 -0
  29. service_forge/sft/config/sft_config.py +153 -0
  30. service_forge/sft/file/__init__.py +0 -0
  31. service_forge/sft/file/ignore_pattern.py +80 -0
  32. service_forge/sft/file/sft_file_manager.py +107 -0
  33. service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
  34. service_forge/sft/util/assert_util.py +25 -0
  35. service_forge/sft/util/logger.py +16 -0
  36. service_forge/sft/util/name_util.py +8 -0
  37. service_forge/sft/util/yaml_utils.py +57 -0
  38. service_forge/utils/__init__.py +0 -0
  39. service_forge/utils/default_type_converter.py +12 -0
  40. service_forge/utils/register.py +39 -0
  41. service_forge/utils/type_converter.py +99 -0
  42. service_forge/utils/workflow_clone.py +124 -0
  43. service_forge/workflow/__init__.py +1 -0
  44. service_forge/workflow/context.py +14 -0
  45. service_forge/workflow/edge.py +24 -0
  46. service_forge/workflow/node.py +184 -0
  47. service_forge/workflow/nodes/__init__.py +8 -0
  48. service_forge/workflow/nodes/control/if_node.py +29 -0
  49. service_forge/workflow/nodes/control/switch_node.py +28 -0
  50. service_forge/workflow/nodes/input/console_input_node.py +26 -0
  51. service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
  52. service_forge/workflow/nodes/nested/workflow_node.py +28 -0
  53. service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
  54. service_forge/workflow/nodes/output/print_node.py +29 -0
  55. service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
  56. service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
  57. service_forge/workflow/port.py +89 -0
  58. service_forge/workflow/trigger.py +24 -0
  59. service_forge/workflow/triggers/__init__.py +6 -0
  60. service_forge/workflow/triggers/a2a_api_trigger.py +255 -0
  61. service_forge/workflow/triggers/fast_api_trigger.py +169 -0
  62. service_forge/workflow/triggers/kafka_api_trigger.py +44 -0
  63. service_forge/workflow/triggers/once_trigger.py +20 -0
  64. service_forge/workflow/triggers/period_trigger.py +26 -0
  65. service_forge/workflow/triggers/websocket_api_trigger.py +184 -0
  66. service_forge/workflow/workflow.py +210 -0
  67. service_forge/workflow/workflow_callback.py +141 -0
  68. service_forge/workflow/workflow_event.py +15 -0
  69. service_forge/workflow/workflow_factory.py +246 -0
  70. service_forge/workflow/workflow_group.py +27 -0
  71. service_forge/workflow/workflow_type.py +52 -0
  72. service_forge-0.1.11.dist-info/METADATA +98 -0
  73. service_forge-0.1.11.dist-info/RECORD +75 -0
  74. service_forge-0.1.11.dist-info/WHEEL +4 -0
  75. service_forge-0.1.11.dist-info/entry_points.txt +2 -0
service_forge/llm/__init__.py
@@ -0,0 +1,62 @@
+ import os
+ from enum import Enum
+ from typing import Iterator
+
+ from .llm import LLM
+
+ _llm_dicts: dict[str, LLM] = {}
+
+ class Model(Enum):
+     # Member names are the dashed model ids upper-cased with "-" -> "_",
+     # which is what get_model() relies on to resolve them.
+     GPT_4_1_NANO = "gpt-4.1-nano"
+     QWEN_TURBO_LATEST = "qwen-turbo-latest"
+     QWEN_PLUS_LATEST = "qwen-plus-latest"
+     QWEN_MAX_LATEST = "qwen-max-latest"
+     DOUBAO_SEED_1_6_250615 = "doubao-seed-1-6-250615"
+     DOUBAO_SEED_1_6_THINKING_250615 = "doubao-seed-1-6-thinking-250615"
+     DOUBAO_SEED_1_6_FLASH_250615 = "doubao-seed-1-6-flash-250615"
+     DEEPSEEK_V3_250324 = "deepseek-v3-250324"
+     AZURE_GPT_4O_MINI = "azure-gpt-4o-mini"
+
+     def provider(self) -> str:
+         if self.value.startswith("gpt"):
+             return "openai"
+         elif self.value.startswith("qwen"):
+             return "dashscope"
+         elif self.value.startswith("doubao"):
+             return "doubao"
+         elif self.value.startswith("deepseek"):
+             return "deepseek"
+         elif self.value.startswith("azure"):
+             return "azure"
+         raise ValueError(f"Invalid model: {self.value}")
+
+ def get_model(model: str) -> Model:
+     if model in Model.__members__:
+         return Model[model]
+
+     model = model.upper().replace("-", "_")
+     if model in Model.__members__:
+         return Model[model]
+
+     raise ValueError(f"Invalid model: {model}")
+
+ def get_llm(provider: str) -> LLM:
+     if provider not in _llm_dicts:
+         if provider == "openai":
+             _llm_dicts[provider] = LLM(os.environ.get("OPENAI_API_KEY", ""), os.environ.get("OPENAI_BASE_URL", ""), int(os.environ.get("OPENAI_TIMEOUT", 2000)))
+         elif provider == "doubao":
+             _llm_dicts[provider] = LLM(os.environ.get("DOUBAO_API_KEY", ""), os.environ.get("DOUBAO_BASE_URL", ""), int(os.environ.get("DOUBAO_TIMEOUT", 2000)))
+         elif provider == "dashscope":
+             _llm_dicts[provider] = LLM(os.environ.get("DASHSCOPE_API_KEY", ""), os.environ.get("DASHSCOPE_BASE_URL", ""), int(os.environ.get("DASHSCOPE_TIMEOUT", 2000)))
+         elif provider == "deepseek":
+             _llm_dicts[provider] = LLM(os.environ.get("DEEPSEEK_API_KEY", ""), os.environ.get("DEEPSEEK_BASE_URL", ""), int(os.environ.get("DEEPSEEK_TIMEOUT", 2000)))
+         elif provider == "azure":
+             _llm_dicts[provider] = LLM(os.environ.get("AZURE_API_KEY", ""), os.environ.get("AZURE_BASE_URL", ""), int(os.environ.get("AZURE_TIMEOUT", 2000)), os.environ.get("AZURE_API_VERSION", ""))
+         else:
+             raise ValueError(f"Invalid provider: {provider}")
+     return _llm_dicts[provider]
+
+ def chat(input: str, system_prompt: str, model: Model, temperature: float) -> str:
+     return get_llm(model.provider()).chat(input, system_prompt, model.value, temperature)
+
+ def chat_stream(input: str, system_prompt: str, model: Model, temperature: float) -> Iterator[str]:
+     return get_llm(model.provider()).chat_stream(input, system_prompt, model.value, temperature)
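
The module-level helpers resolve a dashed model id to an enum member and route the call through a cached per-provider client. A minimal usage sketch, assuming the DashScope credentials (DASHSCOPE_API_KEY, DASHSCOPE_BASE_URL) are set in the environment:

from service_forge.llm import chat, get_model

model = get_model("qwen-plus-latest")   # resolves to Model.QWEN_PLUS_LATEST
assert model.provider() == "dashscope"  # routed to the cached DashScope client

reply = chat(
    input="Summarize service-forge in one sentence.",
    system_prompt="You are a concise assistant.",
    model=model,
    temperature=0.2,
)
print(reply)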
service_forge/llm/llm.py
@@ -0,0 +1,56 @@
+ from typing import Iterator
+
+ from openai import AzureOpenAI, OpenAI
+
+ class LLM:
+     def __init__(self, api_key: str, base_url: str, timeout: int, api_version: str | None = None):
+         # An api_version selects the Azure client; otherwise a plain
+         # OpenAI-compatible client is created against base_url.
+         if api_version is not None:
+             self.client = AzureOpenAI(
+                 api_key=api_key,
+                 azure_endpoint=base_url,
+                 timeout=timeout,
+                 api_version=api_version,
+             )
+         else:
+             self.client = OpenAI(
+                 api_key=api_key,
+                 base_url=base_url,
+                 timeout=timeout,
+             )
+
+     def chat(self, input: str, system_prompt: str, model: str, temperature: float) -> str:
+         if model.startswith("azure"):
+             model = model.replace("azure-", "")
+
+         response = self.client.chat.completions.create(
+             model=model,
+             messages=[
+                 {"role": "system", "content": system_prompt},
+                 {"role": "user", "content": input},
+             ],
+             temperature=temperature,
+         )
+
+         if response.choices[0].message.content is None:
+             return "Error"
+         else:
+             return response.choices[0].message.content
+
+     def chat_stream(self, input: str, system_prompt: str, model: str, temperature: float) -> Iterator[str]:
+         if model.startswith("azure"):
+             model = model.replace("azure-", "")
+
+         stream = self.client.chat.completions.create(
+             model=model,
+             messages=[
+                 {"role": "system", "content": system_prompt},
+                 {"role": "user", "content": input},
+             ],
+             temperature=temperature,
+             stream=True,
+         )
+
+         for chunk in stream:
+             if chunk.choices[0].delta.content is not None:
+                 yield chunk.choices[0].delta.content
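
chat_stream yields content deltas as they arrive; a hedged consumption sketch, assuming an OpenAI-compatible endpoint configured via the usual environment variables:

import os
from service_forge.llm.llm import LLM

client = LLM(
    api_key=os.environ.get("OPENAI_API_KEY", ""),
    base_url=os.environ.get("OPENAI_BASE_URL", ""),
    timeout=120,
)
for token in client.chat_stream(
    input="Count to three.",
    system_prompt="Answer briefly.",
    model="gpt-4.1-nano",
    temperature=0.0,
):
    print(token, end="", flush=True)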
service_forge/model/__init__.py (file without changes)
service_forge/model/websocket.py
@@ -0,0 +1,13 @@
+ from pydantic import BaseModel
+
+ class WebSocketMessage(BaseModel):
+     """WebSocket message model."""
+     message: str | None = None
+     client_id: str
+
+ class WebSocketResponse(BaseModel):
+     """WebSocket response model."""
+     status: str
+     message: str
+     data: dict | None = None
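
A quick round-trip of the two models, assuming Pydantic v2 (model_dump):

from service_forge.model.websocket import WebSocketMessage, WebSocketResponse

msg = WebSocketMessage(client_id="client-1", message="ping")
resp = WebSocketResponse(status="ok", message="pong", data={"echo": msg.message})
print(resp.model_dump())  # {'status': 'ok', 'message': 'pong', 'data': {'echo': 'ping'}}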
service_forge/proto/foo_input.py
@@ -0,0 +1,5 @@
+ from pydantic import BaseModel
+
+ class FooInput(BaseModel):
+     user_id: int
+     data: str
service_forge/service.py
@@ -0,0 +1,288 @@
+ from __future__ import annotations
+
+ import os
+ import asyncio
+ import threading
+ import uuid
+ from omegaconf import OmegaConf
+ from service_forge.workflow.node import node_register
+ from service_forge.workflow.workflow_factory import create_workflows
+ from service_forge.api.http_api import start_fastapi_server
+ from service_forge.api.kafka_api import start_kafka_server
+ from service_forge.db.database import DatabaseManager
+ from loguru import logger
+ from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
+ from service_forge.api.http_api_doc import generate_service_http_api_doc
+ from service_forge.api.routers.service.service_router import set_service
+ from service_forge.sft.config.sf_metadata import SfMetadata
+
+ if TYPE_CHECKING:
+     from service_forge.workflow.workflow_group import WorkflowGroup
+
+ class Service:
+     def __init__(
+         self,
+         metadata: SfMetadata,
+         config_path: str,
+         workflow_config_paths: list[str],
+         _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] | None = None,
+         _handle_query_user: Callable[[str, str], Awaitable[str]] | None = None,
+         enable_http: bool = True,
+         http_host: str = "0.0.0.0",
+         http_port: int = 8000,
+         enable_kafka: bool = True,
+         kafka_host: str = "localhost",
+         kafka_port: int = 9092,
+         service_env: dict[str, Any] | None = None,
+         database_manager: DatabaseManager | None = None,
+     ) -> None:
+         self.metadata = metadata
+         self.config_path = config_path
+         self.workflow_config_paths = workflow_config_paths
+         self._handle_stream_output = _handle_stream_output
+         self._handle_query_user = _handle_query_user
+         self.enable_http = enable_http
+         self.http_host = http_host
+         self.http_port = http_port
+         self.enable_kafka = enable_kafka
+         self.kafka_host = kafka_host
+         self.kafka_port = kafka_port
+         self.service_env = {} if service_env is None else service_env
+         self.database_manager = database_manager
+         self.workflow_groups: list[WorkflowGroup] = []
+         self.workflow_tasks: dict[str, asyncio.Task] = {}  # workflow_name -> task mapping
+         self.workflow_config_map: dict[str, str] = {}  # workflow_name -> config_path mapping
+         self.fastapi_thread: threading.Thread | None = None
+         self.fastapi_loop: asyncio.AbstractEventLoop | None = None
+
+     @property
+     def name(self) -> str:
+         return self.metadata.name
+
+     @property
+     def version(self) -> str:
+         return self.metadata.version
+
+     @property
+     def description(self) -> str:
+         return self.metadata.description
+
+     async def start(self):
+         set_service(self)
+
+         if self.enable_http:
+             fastapi_task = asyncio.create_task(start_fastapi_server(self.http_host, self.http_port))
+             doc_task = asyncio.create_task(generate_service_http_api_doc(self))
+         else:
+             fastapi_task = None
+             doc_task = None
+         if self.enable_kafka:
+             kafka_task = asyncio.create_task(start_kafka_server(f"{self.kafka_host}:{self.kafka_port}"))
+         else:
+             kafka_task = None
+
+         workflow_tasks: list[asyncio.Task] = []
+
+         for workflow_config_path in self.workflow_config_paths:
+             workflow_group = create_workflows(
+                 self.parse_workflow_path(workflow_config_path),
+                 service_env=self.service_env,
+                 _handle_stream_output=self._handle_stream_output,
+                 _handle_query_user=self._handle_query_user,
+                 database_manager=self.database_manager,
+             )
+             self.workflow_groups.append(workflow_group)
+             main_workflow = workflow_group.get_main_workflow()
+             task = asyncio.create_task(workflow_group.run())
+             workflow_tasks.append(task)
+             self.workflow_tasks[main_workflow.name] = task
+             self.workflow_config_map[main_workflow.name] = workflow_config_path
+
+         try:
+             core_tasks = []
+             if fastapi_task:
+                 core_tasks.append(fastapi_task)
+             if doc_task:
+                 core_tasks.append(doc_task)
+             if kafka_task:
+                 core_tasks.append(kafka_task)
+
+             all_tasks = core_tasks + workflow_tasks
+             results = await asyncio.gather(*all_tasks, return_exceptions=True)
+
+             # Check core tasks
+             for i, result in enumerate(results[:len(core_tasks)]):
+                 if isinstance(result, Exception):
+                     logger.error(f"Error in service {self.name} core task {i}: {result}")
+                     raise result
+
+             # Check workflow tasks
+             for i, result in enumerate(results[len(core_tasks):], start=len(core_tasks)):
+                 if isinstance(result, Exception) and not isinstance(result, asyncio.CancelledError):
+                     # A workflow task exception should not stop the service
+                     logger.error(f"Error in service {self.name} workflow task {i}: {result}")
+
+         except Exception as e:
+             logger.error(f"Error in service {self.name}: {e}")
+             if fastapi_task:
+                 fastapi_task.cancel()
+             if kafka_task:
+                 kafka_task.cancel()
+             for workflow_task in workflow_tasks:
+                 workflow_task.cancel()
+             raise
+
+     def parse_workflow_path(self, workflow_config_path: str) -> str:
+         if os.path.isabs(workflow_config_path):
+             return workflow_config_path
+         else:
+             return os.path.join(os.path.dirname(self.config_path), workflow_config_path)
+
+     def get_workflow_group_by_name(self, workflow_name: str) -> WorkflowGroup | None:
+         for workflow_group in self.workflow_groups:
+             if workflow_group.get_workflow(workflow_name) is not None:
+                 return workflow_group
+         return None
+
+     def trigger_workflow(self, workflow_name: str, trigger_name: str, **kwargs) -> uuid.UUID | None:
+         workflow_group = self.get_workflow_group_by_name(workflow_name)
+         if workflow_group is None:
+             logger.error(f"Workflow {workflow_name} not found")
+             return None
+
+         workflow = workflow_group.get_main_workflow()
+         if workflow is None:
+             logger.error(f"Workflow {workflow_name} not found")
+             return None
+
+         return workflow.trigger(trigger_name, **kwargs)
+
+     async def start_workflow(self, workflow_name: str) -> bool:
+         if workflow_name in self.workflow_tasks:
+             task = self.workflow_tasks[workflow_name]
+             if not task.done():
+                 logger.warning(f"Workflow {workflow_name} is already running")
+                 return False
+             del self.workflow_tasks[workflow_name]
+
+         workflow_group = self.get_workflow_group_by_name(workflow_name)
+         if workflow_group is None:
+             logger.error(f"Workflow {workflow_name} not found")
+             return False
+
+         task = asyncio.create_task(workflow_group.run(workflow_name))
+         self.workflow_tasks[workflow_name] = task
+         logger.info(f"Started workflow {workflow_name}")
+         return True
+
+     async def stop_workflow(self, workflow_name: str) -> bool:
+         if workflow_name not in self.workflow_tasks:
+             logger.warning(f"Workflow {workflow_name} is not running")
+             return False
+
+         task = self.workflow_tasks[workflow_name]
+         if task.done():
+             logger.warning(f"Workflow {workflow_name} is already stopped")
+             del self.workflow_tasks[workflow_name]
+             return False
+
+         task.cancel()
+         try:
+             await task
+         except asyncio.CancelledError:
+             pass
+
+         del self.workflow_tasks[workflow_name]
+         logger.info(f"Stopped workflow {workflow_name}")
+         return True
+
+     async def load_workflow_from_config(self, config_path: str | None = None, config: dict | None = None, workflow_name: str | None = None) -> bool:
+         try:
+             if config is None:
+                 if config_path is None:
+                     raise ValueError("Either config_path or config must be provided")
+                 if os.path.isabs(config_path):
+                     full_path = config_path
+                 else:
+                     full_path = self.parse_workflow_path(config_path)
+                 workflow_group = create_workflows(
+                     config_path=full_path,
+                     service_env=self.service_env,
+                     _handle_stream_output=self._handle_stream_output,
+                     _handle_query_user=self._handle_query_user,
+                     database_manager=self.database_manager,
+                 )
+                 config_identifier = config_path
+             else:
+                 workflow_group = create_workflows(
+                     config=config,
+                     service_env=self.service_env,
+                     _handle_stream_output=self._handle_stream_output,
+                     _handle_query_user=self._handle_query_user,
+                     database_manager=self.database_manager,
+                 )
+                 config_identifier = config_path if config_path else "config_dict"
+
+             self.workflow_groups.append(workflow_group)
+             main_workflow = workflow_group.get_main_workflow()
+             # The group is always keyed by its main workflow's name; workflow_name
+             # is accepted for compatibility but does not override it.
+             actual_name = main_workflow.name
+
+             if actual_name in self.workflow_tasks:
+                 await self.stop_workflow(actual_name)
+
+             task = asyncio.create_task(workflow_group.run(actual_name))
+             self.workflow_tasks[actual_name] = task
+             self.workflow_config_map[actual_name] = config_identifier
+
+             logger.info(f"Loaded and started workflow {actual_name} from {config_identifier}")
+             return True
+         except Exception as e:
+             logger.error(f"Failed to load workflow from {config_path or 'config_dict'}: {e}")
+             return False
+
+     def get_service_status(self) -> dict[str, Any]:
+         workflow_statuses = []
+         for workflow_group in self.workflow_groups:
+             for workflow in workflow_group.workflows:
+                 workflow_name = workflow.name
+                 is_running = workflow_name in self.workflow_tasks and not self.workflow_tasks[workflow_name].done()
+                 config_path = self.workflow_config_map.get(workflow_name, "unknown")
+                 workflow_statuses.append({
+                     "name": workflow_name,
+                     "description": workflow.description,
+                     "status": "running" if is_running else "stopped",
+                     "config_path": config_path,
+                 })
+
+         return {
+             "name": self.name,
+             "version": self.version,
+             "description": self.description,
+             "workflows": workflow_statuses,
+         }
+
+     @staticmethod
+     def from_config(metadata: SfMetadata, service_env: dict[str, Any] | None = None) -> Service:
+         config = OmegaConf.to_object(OmegaConf.load(metadata.service_config))
+         database_manager = DatabaseManager.from_config(config=config)
+         return Service(
+             metadata=metadata,
+             config_path=metadata.service_config,
+             workflow_config_paths=config.get('workflows', []),
+             _handle_stream_output=None,
+             _handle_query_user=None,
+             enable_http=config.get('enable_http', True),
+             http_host=config.get('http_host', '0.0.0.0'),
+             http_port=config.get('http_port', 8000),
+             enable_kafka=config.get('enable_kafka', True),
+             kafka_host=config.get('kafka_host', 'localhost'),
+             kafka_port=config.get('kafka_port', 9092),
+             service_env=service_env,
+             database_manager=database_manager,
+         )
+
+ def create_service(config_path: str, name: str, version: str, service_env: dict[str, Any] | None = None) -> Service:
+     # Service.from_config takes (metadata, service_env), so wrap the raw arguments
+     # in SfMetadata first (assuming SfMetadata accepts these fields as keywords).
+     metadata = SfMetadata(name=name, version=version, description="", service_config=config_path)
+     return Service.from_config(metadata, service_env)
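
A minimal boot sketch: the metadata drives the config lookup, and start() runs the HTTP/Kafka servers and all workflow groups under one event loop. The SfMetadata keyword fields below are assumptions; its real constructor lives in sft/config/sf_metadata.py and is not shown in this diff:

import asyncio
from service_forge.service import Service
from service_forge.sft.config.sf_metadata import SfMetadata

# hypothetical field names; check sf_metadata.py for the real signature
metadata = SfMetadata(
    name="demo-service",
    version="0.1.0",
    description="Demo",
    service_config="./service.yaml",
)
asyncio.run(Service.from_config(metadata).start())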
service_forge/service_config.py
@@ -0,0 +1,158 @@
+ from __future__ import annotations
+
+ class ServiceDatabaseConfig:
+     def __init__(
+         self,
+         name: str,
+
+         postgres_user: str | None,
+         postgres_password: str | None,
+         postgres_host: str | None,
+         postgres_port: int | None,
+         postgres_db: str | None,
+
+         mongo_host: str | None,
+         mongo_port: int | None,
+         mongo_user: str | None,
+         mongo_password: str | None,
+         mongo_db: str | None,
+
+         redis_host: str | None,
+         redis_port: int | None,
+         redis_password: str | None,
+     ) -> None:
+         self.name = name
+
+         self.postgres_user = postgres_user
+         self.postgres_password = postgres_password
+         self.postgres_host = postgres_host
+         self.postgres_port = postgres_port
+         self.postgres_db = postgres_db
+
+         self.mongo_host = mongo_host
+         self.mongo_port = mongo_port
+         self.mongo_user = mongo_user
+         self.mongo_password = mongo_password
+         self.mongo_db = mongo_db
+
+         self.redis_host = redis_host
+         self.redis_port = redis_port
+         self.redis_password = redis_password
+
+     @staticmethod
+     def from_dict(config: dict) -> ServiceDatabaseConfig:
+         return ServiceDatabaseConfig(
+             name=config['name'],
+             postgres_user=config.get('postgres_user'),
+             postgres_password=config.get('postgres_password'),
+             postgres_host=config.get('postgres_host'),
+             postgres_port=config.get('postgres_port'),
+             postgres_db=config.get('postgres_db'),
+
+             mongo_host=config.get('mongo_host'),
+             mongo_port=config.get('mongo_port'),
+             mongo_user=config.get('mongo_user'),
+             mongo_password=config.get('mongo_password'),
+             mongo_db=config.get('mongo_db'),
+
+             redis_host=config.get('redis_host'),
+             redis_port=config.get('redis_port'),
+             redis_password=config.get('redis_password'),
+         )
+
+     def to_dict(self) -> dict:
+         return {
+             'name': self.name,
+
+             'postgres_user': self.postgres_user,
+             'postgres_password': self.postgres_password,
+             'postgres_host': self.postgres_host,
+             'postgres_port': self.postgres_port,
+             'postgres_db': self.postgres_db,
+
+             'mongo_host': self.mongo_host,
+             'mongo_port': self.mongo_port,
+             'mongo_user': self.mongo_user,
+             'mongo_password': self.mongo_password,
+             'mongo_db': self.mongo_db,
+
+             'redis_host': self.redis_host,
+             'redis_port': self.redis_port,
+             'redis_password': self.redis_password,
+         }
+
+ class ServiceConfig:
+     def __init__(
+         self,
+         name: str,
+         workflows: list[str],
+         enable_http: bool,
+         http_host: str,
+         http_port: int,
+         enable_kafka: bool,
+         kafka_host: str,
+         kafka_port: int,
+         databases: list[ServiceDatabaseConfig],
+     ) -> None:
+         self.name = name
+         self.workflows = workflows
+         self.enable_http = enable_http
+         self.http_host = http_host
+         self.http_port = http_port
+         self.enable_kafka = enable_kafka
+         self.kafka_host = kafka_host
+         self.kafka_port = kafka_port
+         self.databases = databases
+
+     @staticmethod
+     def from_dict(config: dict) -> ServiceConfig:
+         return ServiceConfig(
+             name=config['name'],
+             workflows=config['workflows'],
+             enable_http=config['enable_http'],
+             http_host=config['http_host'],
+             http_port=config['http_port'],
+             enable_kafka=config['enable_kafka'],
+             kafka_host=config['kafka_host'],
+             kafka_port=config['kafka_port'],
+             databases=[ServiceDatabaseConfig.from_dict(database) for database in config['databases']],
+         )
+
+     def to_dict(self) -> dict:
+         return {
+             'name': self.name,
+             'workflows': self.workflows,
+             'enable_http': self.enable_http,
+             'http_host': self.http_host,
+             'http_port': self.http_port,
+             'enable_kafka': self.enable_kafka,
+             'kafka_host': self.kafka_host,
+             'kafka_port': self.kafka_port,
+             'databases': [database.to_dict() for database in self.databases],
+         }
+
+ # Example service config:
+ #
+ # name: tag_service
+ # workflows:
+ #   # - ./workflow/kafka_workflow.yaml
+ #   - ./workflow/query_tags_workflow.yaml
+ #   - ./workflow/create_tag_workflow.yaml
+ #   - ./workflow/update_tag_workflow.yaml
+ #   - ./workflow/delete_tag_workflow.yaml
+ #   - ./workflow/get_tags_from_record.yaml
+ #
+ # enable_http: true
+ # enable_kafka: false
+ #
+ # # The following configs will be auto-injected by sft.
+ # http_host: 0.0.0.0
+ # http_port: 37200
+ # kafka_host: localhost
+ # kafka_port: 9092
+ #
+ # databases:
+ #   - name: tag
+ #     postgres_user: postgres
+ #     postgres_password: "gnBGWg7aL4"
+ #     postgres_host: second-brain-postgres-postgresql
+ #     postgres_port: 5432
+ #     postgres_db: tag-service-tag
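
A round-trip sketch mirroring the commented example config above; only a subset of database keys is supplied, and the rest default to None:

from service_forge.service_config import ServiceConfig

config = {
    "name": "tag_service",
    "workflows": ["./workflow/query_tags_workflow.yaml"],
    "enable_http": True,
    "http_host": "0.0.0.0",
    "http_port": 37200,
    "enable_kafka": False,
    "kafka_host": "localhost",
    "kafka_port": 9092,
    "databases": [{"name": "tag", "postgres_host": "second-brain-postgres-postgresql"}],
}
service_config = ServiceConfig.from_dict(config)
assert service_config.to_dict()["databases"][0]["postgres_host"] == "second-brain-postgres-postgresql"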
service_forge/sft/cli.py
@@ -0,0 +1,91 @@
+ #!/usr/bin/env python3
+ from typing import Optional
+
+ import typer
+ from service_forge.sft.cmd.list_tars import list_tars
+ from service_forge.sft.cmd.upload_service import upload_service
+ from service_forge.sft.cmd.deploy_service import deploy_service
+ from service_forge.sft.cmd.config_command import list_config, get_config, set_config
+ from service_forge.sft.cmd.service_command import list_services, delete_service, show_service_logs
+
+ app = typer.Typer(
+     name="sft",
+     help="Service Forge CLI - Service management tool",
+     add_completion=False,
+ )
+
+ @app.command(name="upload")
+ def upload_service_command(
+     project_path: Optional[str] = typer.Argument(
+         default=".",
+         help="Project path, default is the current directory",
+     )
+ ) -> None:
+     upload_service(project_path)
+
+ @app.command(name="list")
+ def list_tars_command() -> None:
+     list_tars()
+
+ @app.command(name="deploy")
+ def deploy_service_command(name: str, version: str) -> None:
+     deploy_service(name, version)
+
+ config_app = typer.Typer(
+     name="config",
+     help="Configuration management commands",
+     add_completion=False,
+ )
+
+ @config_app.command(name="list")
+ def config_list_command() -> None:
+     list_config()
+
+ @config_app.command(name="get")
+ def config_get_command(
+     key: str = typer.Argument(help="Configuration item key")
+ ) -> None:
+     get_config(key)
+
+ @config_app.command(name="set")
+ def config_set_command(
+     key: str = typer.Argument(help="Configuration item key"),
+     value: str = typer.Argument(help="Configuration item value")
+ ) -> None:
+     set_config(key, value)
+
+ app.add_typer(config_app)
+
+ service_app = typer.Typer(
+     name="service",
+     help="Kubernetes service management commands",
+     add_completion=False,
+ )
+
+ @service_app.command(name="list")
+ def service_list_command() -> None:
+     list_services()
+
+ @service_app.command(name="delete")
+ def service_delete_command(
+     service_name: str = typer.Argument(help="Service name to delete (must start with sf-)"),
+     force: bool = typer.Option(False, "--force", "-f", help="Force delete")
+ ) -> None:
+     delete_service(service_name, force)
+
+ @service_app.command(name="logs")
+ def service_logs_command(
+     service_name: str = typer.Argument(help="Service name to view logs for (must start with sf-)"),
+     container: Optional[str] = typer.Option(None, "--container", "-c", help="Container name (if pod has multiple containers)"),
+     tail: int = typer.Option(100, "--tail", "-n", help="Number of lines to show from the end of logs"),
+     follow: bool = typer.Option(False, "--follow", "-f", help="Follow log output"),
+     previous: bool = typer.Option(False, "--previous", "-p", help="Get logs from previous instance of container")
+ ) -> None:
+     show_service_logs(service_name, container, tail, follow, previous)
+
+ app.add_typer(service_app)
+
+ def main() -> None:
+     app()
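
The entry_points.txt (not shown here) presumably wires the sft console script to main; the commands can also be exercised in-process with Typer's test runner:

from typer.testing import CliRunner
from service_forge.sft.cli import app

runner = CliRunner()
result = runner.invoke(app, ["config", "list"])
print(result.output)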