service-forge 0.1.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of service-forge might be problematic. Click here for more details.

Files changed (83) hide show
  1. service_forge/api/deprecated_websocket_api.py +86 -0
  2. service_forge/api/deprecated_websocket_manager.py +425 -0
  3. service_forge/api/http_api.py +152 -0
  4. service_forge/api/http_api_doc.py +455 -0
  5. service_forge/api/kafka_api.py +126 -0
  6. service_forge/api/routers/feedback/feedback_router.py +148 -0
  7. service_forge/api/routers/service/service_router.py +127 -0
  8. service_forge/api/routers/websocket/websocket_manager.py +83 -0
  9. service_forge/api/routers/websocket/websocket_router.py +78 -0
  10. service_forge/api/task_manager.py +141 -0
  11. service_forge/current_service.py +14 -0
  12. service_forge/db/__init__.py +1 -0
  13. service_forge/db/database.py +237 -0
  14. service_forge/db/migrations/feedback_migration.py +154 -0
  15. service_forge/db/models/__init__.py +0 -0
  16. service_forge/db/models/feedback.py +33 -0
  17. service_forge/llm/__init__.py +67 -0
  18. service_forge/llm/llm.py +56 -0
  19. service_forge/model/__init__.py +0 -0
  20. service_forge/model/feedback.py +30 -0
  21. service_forge/model/websocket.py +13 -0
  22. service_forge/proto/foo_input.py +5 -0
  23. service_forge/service.py +280 -0
  24. service_forge/service_config.py +44 -0
  25. service_forge/sft/cli.py +91 -0
  26. service_forge/sft/cmd/config_command.py +67 -0
  27. service_forge/sft/cmd/deploy_service.py +123 -0
  28. service_forge/sft/cmd/list_tars.py +41 -0
  29. service_forge/sft/cmd/service_command.py +149 -0
  30. service_forge/sft/cmd/upload_service.py +36 -0
  31. service_forge/sft/config/injector.py +129 -0
  32. service_forge/sft/config/injector_default_files.py +131 -0
  33. service_forge/sft/config/sf_metadata.py +30 -0
  34. service_forge/sft/config/sft_config.py +200 -0
  35. service_forge/sft/file/__init__.py +0 -0
  36. service_forge/sft/file/ignore_pattern.py +80 -0
  37. service_forge/sft/file/sft_file_manager.py +107 -0
  38. service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
  39. service_forge/sft/util/assert_util.py +25 -0
  40. service_forge/sft/util/logger.py +16 -0
  41. service_forge/sft/util/name_util.py +8 -0
  42. service_forge/sft/util/yaml_utils.py +57 -0
  43. service_forge/storage/__init__.py +5 -0
  44. service_forge/storage/feedback_storage.py +245 -0
  45. service_forge/utils/__init__.py +0 -0
  46. service_forge/utils/default_type_converter.py +12 -0
  47. service_forge/utils/register.py +39 -0
  48. service_forge/utils/type_converter.py +99 -0
  49. service_forge/utils/workflow_clone.py +124 -0
  50. service_forge/workflow/__init__.py +1 -0
  51. service_forge/workflow/context.py +14 -0
  52. service_forge/workflow/edge.py +24 -0
  53. service_forge/workflow/node.py +184 -0
  54. service_forge/workflow/nodes/__init__.py +8 -0
  55. service_forge/workflow/nodes/control/if_node.py +29 -0
  56. service_forge/workflow/nodes/control/switch_node.py +28 -0
  57. service_forge/workflow/nodes/input/console_input_node.py +26 -0
  58. service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
  59. service_forge/workflow/nodes/nested/workflow_node.py +28 -0
  60. service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
  61. service_forge/workflow/nodes/output/print_node.py +29 -0
  62. service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
  63. service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
  64. service_forge/workflow/port.py +89 -0
  65. service_forge/workflow/trigger.py +28 -0
  66. service_forge/workflow/triggers/__init__.py +6 -0
  67. service_forge/workflow/triggers/a2a_api_trigger.py +257 -0
  68. service_forge/workflow/triggers/fast_api_trigger.py +201 -0
  69. service_forge/workflow/triggers/kafka_api_trigger.py +47 -0
  70. service_forge/workflow/triggers/once_trigger.py +23 -0
  71. service_forge/workflow/triggers/period_trigger.py +29 -0
  72. service_forge/workflow/triggers/websocket_api_trigger.py +189 -0
  73. service_forge/workflow/workflow.py +227 -0
  74. service_forge/workflow/workflow_callback.py +141 -0
  75. service_forge/workflow/workflow_config.py +66 -0
  76. service_forge/workflow/workflow_event.py +15 -0
  77. service_forge/workflow/workflow_factory.py +246 -0
  78. service_forge/workflow/workflow_group.py +51 -0
  79. service_forge/workflow/workflow_type.py +52 -0
  80. service_forge-0.1.18.dist-info/METADATA +98 -0
  81. service_forge-0.1.18.dist-info/RECORD +83 -0
  82. service_forge-0.1.18.dist-info/WHEEL +4 -0
  83. service_forge-0.1.18.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,280 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import asyncio
5
+ import threading
6
+ import uuid
7
+ from loguru import logger
8
+ from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
9
+ from service_forge.workflow.node import node_register
10
+ from service_forge.workflow.workflow_factory import create_workflow_group
11
+ from service_forge.api.http_api import start_fastapi_server
12
+ from service_forge.api.kafka_api import start_kafka_server
13
+ from service_forge.db.database import DatabaseManager
14
+ from service_forge.api.http_api_doc import generate_service_http_api_doc
15
+ from service_forge.sft.config.sf_metadata import SfMetadata
16
+ from service_forge.service_config import ServiceConfig
17
+ from service_forge.current_service import set_service
18
+
19
+ if TYPE_CHECKING:
20
+ from service_forge.workflow.workflow_group import WorkflowGroup
21
+ from service_forge.workflow.workflow import Workflow
22
+
23
class Service:
    """Runtime container for one deployed service.

    Owns the service's workflow groups, the asyncio tasks running them, and
    the optional HTTP/Kafka front-ends described by ServiceConfig.
    """
    def __init__(
        self,
        metadata: SfMetadata,
        config: ServiceConfig,
        config_path: str,
        service_env: dict[str, Any] = None,
        database_manager: DatabaseManager = None,
        _handle_stream_output: Callable[[str, AsyncIterator[str]], Awaitable[None]] = None,
        _handle_query_user: Callable[[str, str], Awaitable[str]] = None,
    ) -> None:
        self.metadata = metadata
        self.config = config
        # Path the service config was loaded from; used by parse_workflow_path()
        # to resolve relative workflow paths. May be None (see from_config()).
        self.config_path = config_path
        self._handle_stream_output = _handle_stream_output
        self._handle_query_user = _handle_query_user
        self.service_env = {} if service_env is None else service_env
        self.database_manager = database_manager
        self.workflow_groups: list[WorkflowGroup] = []
        # Keyed by the main workflow's `id` (see start()/start_workflow()).
        # NOTE(review): annotated dict[str, ...], but trigger_workflow() returns
        # uuid.UUID for workflow ids — confirm the actual type of Workflow.id.
        self.workflow_tasks: dict[str, asyncio.Task] = {}
        # Keyed by workflow *name* (see start() and get_service_status()),
        # not by workflow id as the original comment claimed.
        self.workflow_config_map: dict[str, str] = {}
        self.fastapi_thread: threading.Thread | None = None
        self.fastapi_loop: asyncio.AbstractEventLoop | None = None

    @property
    def name(self) -> str:
        """Service name, taken from metadata."""
        return self.metadata.name

    @property
    def version(self) -> str:
        """Service version, taken from metadata."""
        return self.metadata.version

    @property
    def description(self) -> str:
        """Service description, taken from metadata."""
        return self.metadata.description

    async def start(self):
        """Run the service: launch configured front-ends and all workflows.

        Awaits every task via asyncio.gather. A failed core task (HTTP server,
        doc generation, Kafka server) cancels everything and re-raises; a
        failed workflow task is only logged.
        """
        set_service(self)

        if self.config.enable_http:
            fastapi_task = asyncio.create_task(start_fastapi_server(self.config.http_host, self.config.http_port))
            doc_task = asyncio.create_task(generate_service_http_api_doc(self))
        else:
            fastapi_task = None
            doc_task = None

        if self.config.enable_kafka:
            kafka_task = asyncio.create_task(start_kafka_server(f"{self.config.kafka_host}:{self.config.kafka_port}"))
        else:
            kafka_task = None

        workflow_tasks: list[asyncio.Task] = []

        for workflow_config_path in self.config.workflows:
            logger.info(f"Loading workflow from {self.parse_workflow_path(workflow_config_path)}")
            workflow_group = create_workflow_group(
                config_path=self.parse_workflow_path(workflow_config_path),
                service_env=self.service_env,
                _handle_stream_output=self._handle_stream_output,
                _handle_query_user=self._handle_query_user,
                database_manager=self.database_manager,
            )
            self.workflow_groups.append(workflow_group)
            main_workflow = workflow_group.get_main_workflow()
            task = asyncio.create_task(workflow_group.run())
            workflow_tasks.append(task)
            self.workflow_tasks[main_workflow.id] = task
            self.workflow_config_map[main_workflow.name] = workflow_config_path

        try:
            core_tasks = []
            if fastapi_task:
                core_tasks.append(fastapi_task)
            if doc_task:
                core_tasks.append(doc_task)
            if kafka_task:
                core_tasks.append(kafka_task)

            all_tasks = core_tasks + workflow_tasks
            # return_exceptions=True: collect failures instead of aborting
            # the gather on the first exception.
            results = await asyncio.gather(*all_tasks, return_exceptions=True)

            # Check core tasks — any failure is fatal for the whole service.
            for i, result in enumerate(results[:len(core_tasks)]):
                if isinstance(result, Exception):
                    logger.error(f"Error in service {self.name} core task {i}: {result}")
                    raise result

            # Check workflow tasks
            for i, result in enumerate(results[len(core_tasks):], start=len(core_tasks)):
                if isinstance(result, Exception) and not isinstance(result, asyncio.CancelledError):
                    # Workflow task exception should not stop the service
                    logger.error(f"Error in service {self.name} workflow task {i}: {result}")

        except Exception as e:
            logger.error(f"Error in service {self.name}: {e}")
            # Best-effort teardown of whatever is still running, then re-raise.
            if fastapi_task:
                fastapi_task.cancel()
            if kafka_task:
                kafka_task.cancel()
            for workflow_task in workflow_tasks:
                workflow_task.cancel()
            raise

    def parse_workflow_path(self, workflow_config_path: str) -> str:
        """Resolve a workflow config path relative to this service's config file."""
        if os.path.isabs(workflow_config_path):
            return workflow_config_path
        else:
            return os.path.join(os.path.dirname(self.config_path), workflow_config_path)

    def get_workflow_group_by_name(self, workflow_name: str, workflow_version: str, allow_none: bool = True) -> WorkflowGroup | None:
        """Return the group containing a workflow with this name and version.

        Returns None when not found and allow_none is True; raises ValueError
        otherwise.
        """
        for workflow_group in self.workflow_groups:
            if workflow_group.get_workflow_by_name(workflow_name, workflow_version) is not None:
                return workflow_group
        if not allow_none:
            raise ValueError(f"Workflow group with name {workflow_name} and version {workflow_version} not found in service {self.name}")
        return None

    def get_workflow_group_by_id(self, workflow_id: str, allow_none: bool = True) -> WorkflowGroup | None:
        """Return the group containing the workflow with this id.

        Returns None when not found and allow_none is True; raises ValueError
        otherwise.
        """
        for workflow_group in self.workflow_groups:
            if workflow_group.get_workflow_by_id(workflow_id) is not None:
                return workflow_group
        if not allow_none:
            raise ValueError(f"Workflow group with id {workflow_id} not found in service {self.name}")
        return None

    def trigger_workflow(self, workflow_group: WorkflowGroup, trigger_name: str, **kwargs) -> uuid.UUID:
        """Fire a named trigger on the group's main workflow; returns a run id."""
        workflow = workflow_group.get_main_workflow(allow_none=False)
        return workflow.trigger(trigger_name, **kwargs)

    def trigger_workflow_by_name(self, workflow_name: str, workflow_version: str, trigger_name: str, **kwargs) -> uuid.UUID:
        """Look up a workflow group by name/version and fire a trigger on it."""
        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)

    def trigger_workflow_by_id(self, workflow_id: str, trigger_name: str, **kwargs) -> uuid.UUID:
        """Look up a workflow group by workflow id and fire a trigger on it."""
        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
        return self.trigger_workflow(workflow_group, trigger_name, **kwargs)

    def start_workflow(self, workflow_group: WorkflowGroup) -> bool:
        """Start the group's main workflow as an asyncio task.

        Returns False (and logs a warning) when it is already running;
        True when a new task was created.
        """
        workflow = workflow_group.get_main_workflow(allow_none=False)
        if workflow.id in self.workflow_tasks:
            task = self.workflow_tasks[workflow.id]
            if not task.done():
                logger.warning(f"Workflow {workflow.id} is already running")
                return False
            # Drop the finished task entry before starting a fresh one.
            del self.workflow_tasks[workflow.id]

        task = asyncio.create_task(workflow_group.run())
        self.workflow_tasks[workflow.id] = task
        logger.info(f"Started workflow {workflow.id}")
        return True

    def start_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
        """Start a workflow looked up by name/version; see start_workflow()."""
        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
        return self.start_workflow(workflow_group)

    def start_workflow_by_id(self, workflow_id: str) -> bool:
        """Start a workflow looked up by id; see start_workflow()."""
        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
        return self.start_workflow(workflow_group)

    async def stop_workflow(self, workflow_group: WorkflowGroup) -> bool:
        """Cancel and await the group's main workflow task.

        Returns False when it is not running (or already finished);
        True once the task has been cancelled and awaited.
        """
        workflow = workflow_group.get_main_workflow(allow_none=False)
        if workflow.id not in self.workflow_tasks:
            logger.warning(f"Workflow {workflow.id} is not running")
            return False
        task = self.workflow_tasks[workflow.id]
        if task.done():
            logger.warning(f"Workflow {workflow.id} is already stopped")
            del self.workflow_tasks[workflow.id]
            return False
        task.cancel()
        await workflow.stop()
        try:
            # Await the cancelled task so its cleanup completes before returning.
            await task
        except asyncio.CancelledError:
            pass
        del self.workflow_tasks[workflow.id]
        logger.info(f"Stopped workflow {workflow.id}")
        return True

    async def stop_workflow_by_name(self, workflow_name: str, workflow_version: str) -> bool:
        """Stop a workflow looked up by name/version; see stop_workflow()."""
        workflow_group = self.get_workflow_group_by_name(workflow_name, workflow_version, allow_none=False)
        return await self.stop_workflow(workflow_group)

    async def stop_workflow_by_id(self, workflow_id: str) -> bool:
        """Stop a workflow looked up by id; see stop_workflow()."""
        workflow_group = self.get_workflow_group_by_id(workflow_id, allow_none=False)
        return await self.stop_workflow(workflow_group)

    async def load_workflow_from_config(self, config_path: str = None, config: dict = None) -> uuid.UUID:
        """Load a new workflow group from a config path or dict and start it.

        Raises ValueError when any workflow in the new group duplicates an
        already-loaded name/version. Returns the main workflow's id.
        """
        workflow_group = create_workflow_group(
            config_path=config_path,
            config=config,
            service_env=self.service_env,
            _handle_stream_output=self._handle_stream_output,
            _handle_query_user=self._handle_query_user,
            database_manager=self.database_manager,
        )

        for workflow in workflow_group.workflows:
            existing_workflow_group = self.get_workflow_group_by_name(workflow.name, workflow.version)
            if existing_workflow_group is not None:
                raise ValueError(f"Workflow group with name {workflow.name} and version {workflow.version} already exists")

        self.workflow_groups.append(workflow_group)
        main_workflow = workflow_group.get_main_workflow()

        # Replace any stale task registered under the same id before starting.
        if main_workflow.id in self.workflow_tasks:
            await self.stop_workflow(workflow_group)

        self.start_workflow(workflow_group)
        return main_workflow.id

    def get_service_status(self) -> dict[str, Any]:
        """Summarize the service and the running/stopped state of each workflow."""
        workflow_statuses = []
        for workflow_group in self.workflow_groups:
            for workflow in workflow_group.workflows:
                workflow_id = workflow.id
                workflow_version = workflow.version
                workflow_config = workflow.config
                workflow_name = workflow.name
                # "Running" means a live (not-done) task is registered for this id.
                is_running = workflow_id in self.workflow_tasks and not self.workflow_tasks[workflow_id].done()
                config_path = self.workflow_config_map.get(workflow_name, "unknown")
                workflow_statuses.append({
                    "name": workflow_name,
                    "id": workflow_id,
                    "version": workflow_version,
                    "config": workflow_config,
                    "description": workflow.description,
                    "status": "running" if is_running else "stopped",
                    "config_path": config_path,
                })

        return {
            "name": self.name,
            "version": self.version,
            "description": self.description,
            "workflows": workflow_statuses,
        }

    @staticmethod
    def from_config(metadata: SfMetadata, service_env: dict[str, Any] = None, config: ServiceConfig = None) -> Service:
        """Build a Service from metadata.

        Loads ServiceConfig from metadata.service_config unless an explicit
        config object is passed (in which case config_path stays None).
        """
        if config is not None:
            config_path = None
        else:
            config_path = metadata.service_config
            config = ServiceConfig.from_yaml_file(config_path)
        database_manager = DatabaseManager.from_config(config=config)
        return Service(
            metadata=metadata,
            config_path=config_path,
            config=config,
            service_env=service_env,
            database_manager=database_manager,
            _handle_stream_output=None,
            _handle_query_user=None,
        )
278
+
279
def create_service(config_path: str, name: str, version: str, service_env: dict[str, Any] = None) -> Service:
    """Build a Service for (name, version) whose config lives at *config_path*.

    FIX(review): the previous body called
    ``Service.from_config(config_path, name, version, service_env)``, but
    ``from_config`` is declared as ``(metadata, service_env=None, config=None)``
    — four positional arguments against three parameters, so every call raised
    TypeError. We now construct the SfMetadata record first.
    NOTE(review): assumes SfMetadata accepts keyword fields
    name/version/service_config — confirm against sf_metadata.py.
    """
    metadata = SfMetadata(name=name, version=version, service_config=config_path)
    return Service.from_config(metadata, service_env)
@@ -0,0 +1,44 @@
1
+ from __future__ import annotations
2
+
3
+ import yaml
4
+ from pydantic import BaseModel
5
+
6
class ServiceFeedbackConfig(BaseModel):
    """Connection settings for the service's feedback API."""
    api_url: str
    api_timeout: int = 5  # request timeout; presumably seconds — confirm at call site
9
+
10
class ServiceDatabaseConfig(BaseModel):
    """Connection settings for one named database.

    Only the field group for the backend in use (Postgres / Mongo / Redis)
    is expected to be populated; everything else stays None.
    """
    name: str
    # PostgreSQL connection parameters.
    postgres_user: str | None = None
    postgres_password: str | None = None
    postgres_host: str | None = None
    postgres_port: int | None = None
    postgres_db: str | None = None

    # MongoDB connection parameters.
    mongo_host: str | None = None
    mongo_port: int | None = None
    mongo_user: str | None = None
    mongo_password: str | None = None
    mongo_db: str | None = None

    # Redis connection parameters.
    redis_host: str | None = None
    redis_port: int | None = None
    redis_password: str | None = None
27
+
28
class ServiceConfig(BaseModel):
    """Top-level service configuration, normally loaded from a YAML file."""
    name: str
    workflows: list[str]  # workflow config paths; absolute, or relative to this config file
    enable_http: bool
    # HTTP host/port are only meaningful when enable_http is True.
    http_host: str | None = None
    http_port: int | None = None
    enable_kafka: bool
    # Kafka host/port are only meaningful when enable_kafka is True.
    kafka_host: str | None = None
    kafka_port: int | None = None
    databases: list[ServiceDatabaseConfig] | None = None
    feedback: ServiceFeedbackConfig | None = None

    @classmethod
    def from_yaml_file(cls, filepath: str) -> ServiceConfig:
        """Load and validate a ServiceConfig from a YAML file."""
        with open(filepath, 'r', encoding='utf-8') as f:
            data = yaml.safe_load(f)
        return cls(**data)
@@ -0,0 +1,91 @@
1
+ #!/usr/bin/env python3
2
+ import sys
3
+ from typing import Optional
4
+
5
+ import typer
6
+ from service_forge.sft.util.logger import log_error, log_info
7
+ from service_forge.sft.cmd.list_tars import list_tars
8
+ from service_forge.sft.cmd.upload_service import upload_service
9
+ from service_forge.sft.cmd.deploy_service import deploy_service
10
+ from service_forge.sft.cmd.config_command import list_config, get_config, set_config
11
+ from service_forge.sft.cmd.service_command import list_services, delete_service, show_service_logs
12
+
13
# Root Typer application for the `sft` console script.
# NOTE: command functions deliberately have no docstrings — Typer would use
# them as --help text and change the CLI's output.
app = typer.Typer(
    name="sft",
    help="Service Forge CLI - Service management tool",
    add_completion=False,
)

@app.command(name="upload")
def upload_service_command(
    project_path: Optional[str] = typer.Argument(
        default=".",
        help="Project path, default is the current directory"
    )
) -> None:
    # Package the project at `project_path` into a tar and upload it.
    upload_service(project_path)

@app.command(name="list")
def list_tars_command() -> None:
    # Show all uploaded service tar archives.
    list_tars()

@app.command(name="deploy")
def deploy_service_command(name: str, version: str) -> None:
    # Deploy a previously uploaded service version to Kubernetes.
    deploy_service(name, version)

# `sft config ...` sub-commands.
config_app = typer.Typer(
    name="config",
    help="Configuration management commands",
    add_completion=False,
)

@config_app.command(name="list")
def config_list_command() -> None:
    list_config()

@config_app.command(name="get")
def config_get_command(
    key: str = typer.Argument(help="Configuration item key")
) -> None:
    get_config(key)

@config_app.command(name="set")
def config_set_command(
    key: str = typer.Argument(help="Configuration item key"),
    value: str = typer.Argument(help="Configuration item value")
) -> None:
    set_config(key, value)

app.add_typer(config_app)

# `sft service ...` sub-commands (Kubernetes management).
service_app = typer.Typer(
    name="service",
    help="Kubernetes service management commands",
    add_completion=False,
)

@service_app.command(name="list")
def service_list_command() -> None:
    list_services()

@service_app.command(name="delete")
def service_delete_command(
    service_name: str = typer.Argument(help="Service name to delete (must start with sf-)"),
    force: bool = typer.Option(False, "--force", "-f", help="Force delete")
) -> None:
    delete_service(service_name, force)

@service_app.command(name="logs")
def service_logs_command(
    service_name: str = typer.Argument(help="Service name to view logs for (must start with sf-)"),
    container: Optional[str] = typer.Option(None, "--container", "-c", help="Container name (if pod has multiple containers)"),
    tail: int = typer.Option(100, "--tail", "-n", help="Number of lines to show from the end of logs"),
    follow: bool = typer.Option(False, "--follow", "-f", help="Follow log output"),
    previous: bool = typer.Option(False, "--previous", "-p", help="Get logs from previous instance of container")
) -> None:
    show_service_logs(service_name, container, tail, follow, previous)

app.add_typer(service_app)

# Console-script entry point (referenced from entry_points.txt).
def main() -> None:
    app()
@@ -0,0 +1,67 @@
1
+ from typing import Optional
2
+ import typer
3
+ from rich.console import Console
4
+ from rich.table import Table
5
+
6
+ from service_forge.sft.config.sft_config import SftConfig
7
+ from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
8
+ from service_forge.sft.config.sft_config import sft_config
9
+
10
def list_config() -> None:
    """Render every SFT configuration item as a rich table, with the
    config-file location printed underneath. Exits with code 1 on failure."""
    try:
        out = Console()

        table = Table(title="SFT Configuration", show_header=True, header_style="bold magenta")
        table.add_column("Key", style="cyan", no_wrap=True)
        table.add_column("Value", style="green")
        table.add_column("Description", style="yellow")

        # One row per config entry, in deterministic key order.
        for key, value in sorted(sft_config.to_dict().items()):
            table.add_row(
                key,
                str(value),
                SftConfig.CONFIG_DESCRIPTIONS.get(key, "No description available"),
            )

        out.print(table)
        out.print(f"\n[dim]Config file location: {sft_config.config_file_path}[/dim]")
    except Exception as e:
        log_error(f"Failed to load config: {e}")
        raise typer.Exit(1)
30
+
31
def get_config(key: str) -> None:
    """Print the value of a single SFT configuration key.

    Exits with code 1 when the key is unknown or the lookup fails.
    """
    try:
        value = sft_config.get(key)

        if value is None:
            log_error(f"Config key '{key}' not found")
            log_info("Available keys: config_root, sft_file_root, k8s_namespace")
            raise typer.Exit(1)

        log_info(f"{key} = {value}")
    except typer.Exit:
        # FIX: typer.Exit derives from Exception, so the deliberate exit above
        # was caught by the broad handler below and logged a spurious
        # "Failed to get config" message. Re-raise it untouched.
        raise
    except ValueError as e:
        log_error(str(e))
        raise typer.Exit(1)
    except Exception as e:
        log_error(f"Failed to get config: {e}")
        raise typer.Exit(1)
47
+
48
def set_config(key: str, value: str) -> None:
    """Update one SFT configuration key and persist the config file.

    Exits with code 1 when the key is unknown or saving fails.
    """
    try:
        current_value = sft_config.get(key)
        if current_value is None:
            log_error(f"Unknown config key: {key}")
            log_info("Available keys: config_root, sft_file_root, k8s_namespace")
            raise typer.Exit(1)

        sft_config.set(key, value)
        sft_config.save()

        log_success(f"Updated {key} = {value}")
        log_info(f"Config saved to {sft_config.config_file_path}")
    except typer.Exit:
        # FIX: typer.Exit derives from Exception; without this clause the
        # deliberate exit above fell into the broad handler below and logged
        # a misleading "Failed to set config" message. Re-raise it untouched.
        raise
    except ValueError as e:
        log_error(str(e))
        raise typer.Exit(1)
    except Exception as e:
        log_error(f"Failed to set config: {e}")
        raise typer.Exit(1)
66
+ raise typer.Exit(1)
67
+
@@ -0,0 +1,123 @@
1
+ import os
2
+ import shutil
3
+ import subprocess
4
+ import tarfile
5
+ import tempfile
6
+ from pathlib import Path
7
+
8
+ import typer
9
+ from omegaconf import OmegaConf
10
+ from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
11
+ from service_forge.sft.file.sft_file_manager import sft_file_manager
12
+ from service_forge.sft.config.sft_config import sft_config
13
+ from service_forge.sft.util.assert_util import assert_file_exists, assert_dir_exists
14
+ from service_forge.sft.config.sf_metadata import load_metadata
15
+ from service_forge.sft.kubernetes.kubernetes_manager import KubernetesManager
16
+ from service_forge.sft.config.injector import Injector
17
+ from service_forge.sft.util.name_util import get_service_name
18
+
19
def _extract_tar_file(tar_file: Path, temp_path: Path) -> None:
    """Extract *tar_file* into *temp_path*; exits with code 1 on any failure.

    Uses the 'data' extraction filter (Python 3.12+, backported to security
    releases) to reject path-traversal members, absolute paths, and special
    files in the archive; falls back to the legacy unfiltered behaviour on
    interpreters that do not provide it.
    """
    log_info(f"Extracting tar file to: {temp_path}")

    try:
        with tarfile.open(tar_file, 'r') as tar:
            if hasattr(tarfile, "data_filter"):
                tar.extractall(temp_path, filter="data")
            else:
                tar.extractall(temp_path)
    except Exception as e:
        log_error(f"Failed to extract tar file: {e}")
        raise typer.Exit(1)

    log_success("Tar file extracted successfully")
30
+
31
def _build_docker_image(project_dir: Path, name: str, version: str) -> None:
    """Build the service's Docker image and push it to the configured registry.

    Exits with code 1 when docker is not installed or either step fails.
    """
    image_name = f"sf-{name}:{version}"
    full_image_name = sft_config.registry_address + "/" + image_name
    log_info(f"Building Docker image: {image_name}")
    try:
        # Build the image from the extracted project directory.
        result = subprocess.run(
            ["docker", "build", "-t", full_image_name, str(project_dir)],
            capture_output=True,
            text=True,
            check=True,
        )
        log_success(f"Docker image built successfully: {image_name}")
        if result.stdout:
            log_info(result.stdout)

        # Push the freshly built image so the cluster can pull it.
        log_info(f"Pushing Docker image to registry: {full_image_name}")
        result = subprocess.run(
            ["docker", "push", full_image_name],
            capture_output=True,
            text=True,
            check=True,
        )
        log_success(f"Docker image pushed successfully: {full_image_name}")
        if result.stdout:
            log_info(result.stdout)

    except subprocess.CalledProcessError as e:
        log_error(f"Docker operation failed: {e}")
        if e.stderr:
            log_error(e.stderr)
        raise typer.Exit(1)
    except FileNotFoundError:
        log_error("Docker command not found. Please install Docker.")
        raise typer.Exit(1)
67
+
68
def _apply_k8s_deployment(deployment_yaml: Path, ingress_yaml: Path, name: str, version: str) -> None:
    """Replace any existing deployment for this service, then apply the
    deployment and ingress manifests. Exits with code 1 on failure."""
    log_info("Applying k8s deployment...")

    try:
        manager = KubernetesManager()
        namespace = sft_config.k8s_namespace
        # Force-delete the previous instance first so the apply starts clean.
        manager.delete_service(namespace, get_service_name(name, version), force=True)
        for manifest in (deployment_yaml, ingress_yaml):
            manager.apply_deployment_yaml(manifest, namespace)
        log_success("K8s deployment applied successfully")
    except Exception as e:
        log_error(f"K8s deployment failed: {e}")
        raise typer.Exit(1)

    log_success(f"Deployment process completed for {name}:{version}")
82
+
83
def _inject_config(project_dir: Path) -> None:
    """Run the config injector over the extracted project directory."""
    Injector(project_dir).inject()
86
+
87
def deploy_service(name: str, version: str) -> None:
    """Deploy an uploaded service tar: extract, inject config, build & push
    the Docker image, then apply the Kubernetes manifests.

    Exits with code 1 (via the helpers) on any failure.
    """
    # Uploaded archives are named sf_<name>_<version>.tar under the tar root.
    tar_file = sft_file_manager.tar_path / f"sf_{name}_{version}.tar"

    assert_file_exists(tar_file)

    # Work in a per-deploy temp directory under <tmp>/sft; cleaned up by the
    # context manager on exit.
    temp_parent = os.path.join(tempfile.gettempdir(), "sft")
    os.makedirs(temp_parent, exist_ok=True)

    with tempfile.TemporaryDirectory(prefix=f"deploy_{name}_{version}", dir=temp_parent) as temp_dir:
        temp_path = Path(temp_dir)

        _extract_tar_file(tar_file, temp_path)

        # The archive is expected to contain a single <name>_<version>/ root.
        project_dir = temp_path / f"{name}_{version}"

        _inject_config(project_dir)

        dockerfile_path = project_dir / "Dockerfile"
        metadata_path = project_dir / "sf-meta.yaml"
        deployment_yaml = project_dir / "deployment.yaml"
        ingress_yaml = project_dir / "ingress.yaml"

        assert_dir_exists(project_dir)
        assert_file_exists(dockerfile_path)
        assert_file_exists(metadata_path)
        assert_file_exists(deployment_yaml)
        assert_file_exists(ingress_yaml)

        try:
            meta_data = load_metadata(metadata_path)
        except Exception as e:
            log_error(f"Failed to read sf-meta.yaml: {e}")
            raise typer.Exit(1)

        # NOTE(review): the image/deployment use meta_data.name/version, which
        # may differ from the (name, version) CLI arguments used to locate the
        # tar — confirm they are kept in sync by the upload path.
        _build_docker_image(project_dir, meta_data.name, meta_data.version)
        # TODO: create new user in mongodb and redis
        _apply_k8s_deployment(deployment_yaml, ingress_yaml, meta_data.name, meta_data.version)
@@ -0,0 +1,41 @@
1
+ from pathlib import Path
2
+
3
+ from rich.console import Console
4
+ from rich.table import Table
5
+
6
+ from service_forge.sft.util.logger import log_error, log_info
7
+ from service_forge.sft.file.sft_file_manager import sft_file_manager
8
+
9
def list_tars() -> None:
    """Print a rich table of every uploaded service tar archive, or an
    informational message when there are none."""
    archives = sft_file_manager.load_tars()

    if not archives:
        log_info("No tar files found.")
        return

    table = Table(title="Service Tar Files", show_header=True, header_style="bold magenta")
    table.add_column("Project", style="cyan", no_wrap=True)
    table.add_column("Version", style="cyan", no_wrap=True)
    table.add_column("File Name", style="cyan", no_wrap=True)
    table.add_column("Size", justify="right", style="green")
    table.add_column("Modified Time", style="yellow")

    for archive in archives:
        # Size/time formatting is provided by the tar-file record itself.
        table.add_row(
            archive.project_name,
            archive.version,
            archive.path.name,
            archive._format_size(),
            archive._format_modified_time(),
        )

    Console().print(table)
28
+
29
+
30
+ def _format_size(size_bytes: int) -> str:
31
+ for unit in ['B', 'KB', 'MB', 'GB']:
32
+ if size_bytes < 1024.0:
33
+ return f"{size_bytes:.2f} {unit}"
34
+ size_bytes /= 1024.0
35
+ return f"{size_bytes:.2f} TB"
36
+
37
+
38
+ def _format_time(timestamp: float) -> str:
39
+ from datetime import datetime
40
+ return datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S")
41
+