service-forge 0.1.18__py3-none-any.whl → 0.1.28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of service-forge might be problematic.

@@ -3,7 +3,7 @@ import uvicorn
 from fastapi import APIRouter
 from loguru import logger
 from urllib.parse import urlparse
-from fastapi import HTTPException, Request
+from fastapi import HTTPException, Request, WebSocket, WebSocketException
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.openapi.utils import get_openapi
 from service_forge.api.routers.websocket.websocket_router import websocket_router
@@ -36,6 +36,88 @@ def is_trusted_origin(origin_host: str, host: str, trusted_root: str = "ring.shi
 )
 
 
+def validate_auth_from_headers(
+    headers: dict,
+    origin: str | None,
+    scheme: str,
+    host: str,
+    trusted_domain: str = "ring.shiweinan.com",
+) -> tuple[str | None, str | None]:
+    """
+    Validate authentication from headers and return user_id and token.
+
+    Args:
+        headers: Dictionary of headers (can be from Request or WebSocket)
+        origin: Origin header value
+        scheme: URL scheme (http/https/ws/wss)
+        host: Host header value
+        trusted_domain: Trusted domain for origin validation
+
+    Returns:
+        tuple: (user_id, auth_token) - user_id can be None if not authenticated and not same origin
+    """
+    is_same_origin = False
+
+    logger.debug(f"origin {origin}, host:{host}")
+
+    if origin and host:
+        try:
+            parsed_origin = urlparse(origin)
+            parsed_host = urlparse(f"{scheme}://{host}")
+            is_same_origin = (
+                parsed_origin.hostname == parsed_host.hostname
+                and parsed_origin.port == parsed_host.port
+                and is_trusted_origin(parsed_origin.hostname, parsed_host.hostname, trusted_domain)
+            )
+        except Exception:
+            pass
+
+    user_id = headers.get("X-User-ID")
+    token = headers.get("X-User-Token")
+
+    if not is_same_origin:
+        # For cross-origin requests, user_id is required
+        if not user_id:
+            return None, None
+        return user_id, token
+    else:
+        # For same-origin requests, user_id defaults to "0" if not provided
+        return user_id if user_id else "0", token
+
+
+async def authenticate_websocket(
+    websocket: WebSocket,
+    trusted_domain: str = "ring.shiweinan.com",
+) -> None:
+    """
+    Authenticate WebSocket connection and set user_id and auth_token in websocket.state.
+
+    Args:
+        websocket: WebSocket instance
+        trusted_domain: Trusted domain for origin validation
+
+    Raises:
+        WebSocketException: If authentication fails
+    """
+    origin = websocket.headers.get("origin") or websocket.headers.get("referer")
+    scheme = websocket.url.scheme
+    host = websocket.headers.get("host", "")
+
+    user_id, token = validate_auth_from_headers(
+        websocket.headers,
+        origin,
+        scheme,
+        host,
+        trusted_domain,
+    )
+
+    if user_id is None:
+        raise WebSocketException(code=1008, reason="Unauthorized")
+
+    websocket.state.user_id = user_id
+    websocket.state.auth_token = token
+
+
 def create_app(
     app: FastAPI | None = None,
     routers: list[APIRouter] | None = None,
@@ -77,6 +159,10 @@ def create_app(
     for router in routers:
         app.include_router(router)
 
+    # Store auth configuration in app.state for WebSocket endpoints to access
+    app.state.enable_auth_middleware = enable_auth_middleware
+    app.state.trusted_domain = trusted_domain
+
     # Always include WebSocket router
     app.include_router(websocket_router)
 
@@ -100,31 +186,20 @@ def create_app(
         origin = request.headers.get("origin") or request.headers.get("referer")
         scheme = request.url.scheme
         host = request.headers.get("host", "")
-        is_same_origin = False
 
-        logger.debug(f"origin {origin}, host:{host}")
+        user_id, token = validate_auth_from_headers(
+            request.headers,
+            origin,
+            scheme,
+            host,
+            trusted_domain,
+        )
+
+        if user_id is None:
+            raise HTTPException(status_code=401, detail="Unauthorized")
 
-        if origin and host:
-            try:
-                parsed_origin = urlparse(origin)
-                parsed_host = urlparse(f"{scheme}://{host}")
-                is_same_origin = (
-                    parsed_origin.hostname == parsed_host.hostname
-                    and parsed_origin.port == parsed_host.port
-                    and is_trusted_origin(parsed_origin.hostname, parsed_host.hostname, trusted_domain)
-                )
-            except Exception:
-                pass  # If parsing fails, continue with default behavior
-        if not is_same_origin:
-            headers = request.headers
-            user_id = headers.get("X-User-ID")
-            if not user_id:
-                raise HTTPException(status_code=401, detail="Unauthorized")
-
-            request.state.user_id = user_id
-        else:
-            # Same-origin requests can skip auth, but still set default user_id
-            request.state.user_id = "0"  # Can be None or default value as needed
+        request.state.user_id = user_id
+        request.state.auth_token = token
 
         return await call_next(request)
 
@@ -147,6 +222,7 @@ async def start_fastapi_server(host: str, port: int):
 
     try:
        metadata = load_metadata("sf-meta.yaml")
-        fastapi_app = create_app(enable_auth_middleware=False, root_path=f"/api/v1/{get_service_url_name(metadata.name, metadata.version)}")
+        fastapi_app = create_app(enable_auth_middleware=metadata.enable_auth_middleware, root_path=f"/api/v1/{get_service_url_name(metadata.name, metadata.version)}")
     except Exception as e:
-        fastapi_app = create_app(enable_auth_middleware=False, root_path=None)
+        logger.warning(f"Failed to load metadata, using default configuration: {e}")
+        fastapi_app = create_app(enable_auth_middleware=True, root_path=None)
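The hunks above route both HTTP and WebSocket authentication through validate_auth_from_headers, so cross-origin callers must now send X-User-ID (and optionally X-User-Token). A minimal client-side sketch; the header names come from the diff, while the URL, path, and credential values are placeholders:

import requests

# Header names X-User-ID / X-User-Token match the new middleware;
# the endpoint URL and credential values are illustrative only.
response = requests.post(
    "http://localhost:8000/api/v1/example-service/run",
    headers={"X-User-ID": "42", "X-User-Token": "example-token"},
    json={"data": {}},
    timeout=10,
)
response.raise_for_status()  # a 401 here means the middleware rejected the request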
@@ -9,6 +9,19 @@ websocket_router = APIRouter()
 
 @websocket_router.websocket("/sdk/ws")
 async def sdk_websocket_endpoint(websocket: WebSocket):
+    # Authenticate WebSocket connection before accepting
+    # Get trusted_domain from app.state if available
+    # trusted_domain = getattr(websocket.app.state, "trusted_domain", "ring.shiweinan.com")
+    # enable_auth = getattr(websocket.app.state, "enable_auth_middleware", True)
+
+    # if enable_auth:
+    #     from service_forge.api.http_api import authenticate_websocket
+    #     await authenticate_websocket(websocket, trusted_domain)
+    # else:
+    #     # If auth is disabled, set default values
+    #     websocket.state.user_id = websocket.headers.get("X-User-ID", "0")
+    #     websocket.state.auth_token = websocket.headers.get("X-User-Token")
+
     await websocket.accept()
     try:
         while True:
@@ -7,6 +7,7 @@ from typing import AsyncGenerator
 from loguru import logger
 from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
 from service_forge.service_config import ServiceConfig
+from pymongo import AsyncMongoClient
 
 class PostgresDatabase:
     def __init__(
@@ -114,6 +115,7 @@ class MongoDatabase:
         self.mongo_password = mongo_password
         self.mongo_db = mongo_db or ""
         self.client = pymongo.MongoClient(self.database_url)
+        self.async_client = AsyncMongoClient(self.database_url)
         self.test_connection()
 
     @property
@@ -129,6 +131,21 @@ class MongoDatabase:
             logger.error(f"MongoDB connection test failed for database '{self.name}': {e}")
             return False
 
+    async def test_async_connection(self) -> bool:
+        try:
+            await self.async_client.admin.command('ping')
+            logger.info(f"Async MongoDB connection test successful for database '{self.name}'")
+            return True
+        except Exception as e:
+            logger.error(f"Async MongoDB connection test failed for database '{self.name}': {e}")
+            return False
+
+    def get_sync_collection(self, collection_name: str):
+        return self.client[self.mongo_db][collection_name]
+
+    def get_async_collection(self, collection_name: str):
+        return self.async_client[self.mongo_db][collection_name]
+
 class RedisDatabase:
     def __init__(
         self,
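MongoDatabase now keeps an AsyncMongoClient next to the existing synchronous client. A short sketch of how the async path might be used, assuming an already-configured MongoDatabase instance named mongo; the collection name and query are illustrative:

import asyncio

async def count_users(mongo) -> int:
    # test_async_connection and get_async_collection are the helpers added above
    if not await mongo.test_async_connection():
        raise RuntimeError("MongoDB is unreachable")
    users = mongo.get_async_collection("users")  # "users" is a placeholder collection
    return await users.count_documents({})

# asyncio.run(count_users(mongo))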
service_forge/service.py CHANGED
@@ -4,6 +4,7 @@ import os
 import asyncio
 import threading
 import uuid
+from importlib.metadata import version
 from loguru import logger
 from typing import Callable, AsyncIterator, Awaitable, Any, TYPE_CHECKING
 from service_forge.workflow.node import node_register
@@ -260,6 +261,12 @@ class Service:
 
     @staticmethod
     def from_config(metadata: SfMetadata, service_env: dict[str, Any] = None, config: ServiceConfig = None) -> Service:
+        try:
+            service_forge_version = version("service-forge")
+            logger.info(f"service-forge version: {service_forge_version}")
+        except Exception as e:
+            logger.warning(f"Failed to get service-forge version: {e}")
+
         if config is not None:
             config_path = None
         else:
service_forge/sft/cli.py CHANGED
@@ -9,6 +9,8 @@ from service_forge.sft.cmd.upload_service import upload_service
 from service_forge.sft.cmd.deploy_service import deploy_service
 from service_forge.sft.cmd.config_command import list_config, get_config, set_config
 from service_forge.sft.cmd.service_command import list_services, delete_service, show_service_logs
+from service_forge.sft.cmd.remote_list_tars import remote_list_tars
+from service_forge.sft.cmd.remote_deploy import remote_deploy_tar, remote_list_and_deploy
 
 app = typer.Typer(
     name="sft",
@@ -33,6 +35,43 @@ def list_tars_command() -> None:
 def deploy_service_command(name: str, version: str) -> None:
     deploy_service(name, version)
 
+@app.command(name="remote-list")
+def remote_list_tars_command(
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """List tar packages and their status on remote server"""
+    remote_list_tars(url)
+
+@app.command(name="remote-deploy")
+def remote_deploy_command(
+    filename: str = typer.Argument(help="Filename of the tar package to deploy"),
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """Remote deploy specified tar package"""
+    remote_deploy_tar(filename, url)
+
+@app.command(name="remote-deploy-interactive")
+def remote_deploy_interactive_command(
+    url: str = typer.Option(
+        None,
+        "--url",
+        "-u",
+        help="Service Center URL (default: http://localhost:5000 or from service_center_address config)"
+    )
+) -> None:
+    """Interactive remote deployment of tar packages (list available packages first, then select for deployment)"""
+    remote_list_and_deploy(url)
+
 config_app = typer.Typer(
     name="config",
     help="Configuration management commands",
@@ -0,0 +1,160 @@
+import os
+import json
+import requests
+from pathlib import Path
+from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
+from service_forge.sft.config.sft_config import sft_config
+
+def remote_deploy_tar(filename: str, service_center_url: str = None) -> None:
+    """
+    Remote deploy specified tar package from service-center
+    """
+    # If URL is not provided, try to get it from configuration
+    if not service_center_url:
+        service_center_url = getattr(sft_config, 'service_center_address', 'http://localhost:5000')
+
+    # Ensure URL ends with /
+    if not service_center_url.endswith('/'):
+        service_center_url += '/'
+
+    api_url = f"{service_center_url}api/v1/services/deploy-from-tar"
+
+    log_info(f"Sending deployment request to {api_url}...")
+    log_info(f"Tar package to deploy: {filename}")
+
+    try:
+        # Prepare request data
+        data = {
+            "filename": filename
+        }
+
+        # Send POST request
+        response = requests.post(
+            api_url,
+            json=data,
+            headers={'Content-Type': 'application/json'},
+            timeout=300  # 5 minute timeout
+        )
+
+        if response.status_code != 200:
+            log_error(f"Deployment request failed, status code: {response.status_code}")
+            try:
+                error_data = response.json()
+                log_error(f"Error message: {error_data.get('message', 'Unknown error')}")
+                if 'data' in error_data and error_data['data']:
+                    log_error(f"Details: {json.dumps(error_data['data'], indent=2, ensure_ascii=False)}")
+            except:
+                log_error(f"Response content: {response.text}")
+            return
+
+        # Parse response data
+        result = response.json()
+
+        if result.get('code') != 200:
+            log_error(f"Deployment failed: {result.get('message', 'Unknown error')}")
+            if 'data' in result and result['data']:
+                log_error(f"Details: {json.dumps(result['data'], indent=2, ensure_ascii=False)}")
+            return
+
+        # Deployment successful
+        data = result.get('data', {})
+        service_name = data.get('service_name', 'Unknown')
+        version = data.get('version', 'Unknown')
+        deploy_output = data.get('deploy_output', '')
+
+        log_success(f"Successfully deployed service: {service_name} version: {version}")
+
+        if deploy_output:
+            log_info("Deployment output:")
+            print(deploy_output)
+
+    except requests.exceptions.Timeout:
+        log_error("Deployment request timed out (exceeded 5 minutes), please check service status or try again later")
+    except requests.exceptions.RequestException as e:
+        log_error(f"Request failed: {str(e)}")
+        log_info(f"Please check if service-center service is running normally and if the URL is correct: {service_center_url}")
+    except Exception as e:
+        log_error(f"Exception occurred while deploying tar package: {str(e)}")
+
+def remote_list_and_deploy(service_center_url: str = None) -> None:
+    """
+    List remote tar packages first, then let user select which package to deploy
+    """
+    # If URL is not provided, try to get it from configuration
+    if not service_center_url:
+        service_center_url = getattr(sft_config, 'service_center_address', 'http://localhost:5000')
+
+    # Ensure URL ends with /
+    if not service_center_url.endswith('/'):
+        service_center_url += '/'
+
+    api_url = f"{service_center_url}api/v1/services/tar-list"
+
+    log_info(f"Getting tar package list from {api_url}...")
+
+    try:
+        # Send GET request to fetch the tar package list
+        response = requests.get(api_url, timeout=30)
+
+        if response.status_code != 200:
+            log_error(f"Failed to get tar package list, status code: {response.status_code}")
+            return
+
+        # Parse response data
+        result = response.json()
+
+        if result.get('code') != 200:
+            log_error(f"Failed to get tar package list: {result.get('message', 'Unknown error')}")
+            return
+
+        tar_files = result.get('data', [])
+
+        if not tar_files:
+            log_info("No tar packages found")
+            return
+
+        # Display tar package list
+        log_info("Available tar package list:")
+        for i, tar_file in enumerate(tar_files, 1):
+            filename = tar_file.get('filename', '-')
+            service_name = tar_file.get('service_name', '-')
+            version = tar_file.get('version', '-')
+            deployed_status = "Deployed" if tar_file.get('deployed_status', False) else "Not Deployed"
+
+            print(f"{i}. {filename} (service: {service_name}, version: {version}, status: {deployed_status})")
+
+        # Let user choose
+        try:
+            choice = input("\nEnter the number of the tar package to deploy (enter 'q' to exit): ").strip()
+
+            if choice.lower() == 'q':
+                log_info("Deployment cancelled")
+                return
+
+            index = int(choice) - 1
+            if 0 <= index < len(tar_files):
+                selected_tar = tar_files[index]
+                filename = selected_tar.get('filename')
+
+                if selected_tar.get('deployed_status', False):
+                    log_warning(f"Tar package {filename} is already deployed, continue deployment?")
+                    confirm = input("Enter 'y' to continue, any other key to cancel: ").strip().lower()
+                    if confirm != 'y':
+                        log_info("Deployment cancelled")
+                        return
+
+                log_info(f"Selected for deployment: {filename}")
+                remote_deploy_tar(filename, service_center_url)
+            else:
+                log_error("Invalid selection")
+
+        except ValueError:
+            log_error("Please enter a valid number")
+        except KeyboardInterrupt:
+            log_info("\nDeployment cancelled")
+
+    except requests.exceptions.RequestException as e:
+        log_error(f"Request failed: {str(e)}")
+        log_info(f"Please check if service-center service is running normally and if the URL is correct: {service_center_url}")
+    except Exception as e:
+        log_error(f"Exception occurred while getting tar package list: {str(e)}")
@@ -0,0 +1,111 @@
+import os
+import json
+import requests
+from pathlib import Path
+from rich.console import Console
+from rich.table import Table
+from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
+from service_forge.sft.config.sft_config import sft_config
+
+def remote_list_tars(service_center_url: str = None) -> None:
+    """
+    Get remote tar package list and status from service-center
+    """
+    # If URL is not provided, try to get it from configuration
+    if not service_center_url:
+        service_center_url = getattr(sft_config, 'service_center_address', 'http://localhost:5000')
+
+    # Ensure URL ends with /
+    if not service_center_url.endswith('/'):
+        service_center_url += '/'
+
+    api_url = f"{service_center_url}api/v1/services/tar-list"
+
+    log_info(f"Getting tar package list from {api_url}...")
+
+    try:
+        # Send GET request
+        response = requests.get(api_url, timeout=30)
+
+        if response.status_code != 200:
+            log_error(f"Failed to get tar package list, status code: {response.status_code}")
+            try:
+                error_data = response.json()
+                log_error(f"Error message: {error_data.get('message', 'Unknown error')}")
+            except:
+                log_error(f"Response content: {response.text}")
+            return
+
+        # Parse response data
+        result = response.json()
+
+        if result.get('code') != 200:
+            log_error(f"Failed to get tar package list: {result.get('message', 'Unknown error')}")
+            return
+
+        tar_files = result.get('data', [])
+
+        if not tar_files:
+            log_info("No tar packages found")
+            return
+
+        # Use rich table to display results
+        console = Console()
+        table = Table(title="Remote Server Tar Package List", show_header=True, header_style="bold magenta")
+        table.add_column("Filename", style="cyan", no_wrap=True)
+        table.add_column("Service Name", style="green", no_wrap=True)
+        table.add_column("Version", style="blue", no_wrap=True)
+        table.add_column("Size", justify="right", style="yellow")
+        table.add_column("Modified Time", style="dim")
+        table.add_column("Deploy Status", justify="center", style="bold")
+
+        for tar_file in tar_files:
+            # Format file size
+            size = _format_size(tar_file.get('file_size', 0))
+
+            # Format modification time
+            modified_time = _format_time(tar_file.get('modified_time', 0))
+
+            # Deployment status
+            deployed_status = "✅ Deployed" if tar_file.get('deployed_status', False) else "❌ Not Deployed"
+            status_style = "green" if tar_file.get('deployed_status', False) else "red"
+
+            table.add_row(
+                tar_file.get('filename', '-'),
+                tar_file.get('service_name', '-'),
+                tar_file.get('version', '-'),
+                size,
+                modified_time,
+                f"[{status_style}]{deployed_status}[/{status_style}]"
+            )
+
+        console.print(table)
+        log_success(f"Found {len(tar_files)} tar packages in total")
+
+    except requests.exceptions.RequestException as e:
+        log_error(f"Request failed: {str(e)}")
+        log_info(f"Please check if service-center service is running normally and if the URL is correct: {service_center_url}")
+    except Exception as e:
+        log_error(f"Exception occurred while getting tar package list: {str(e)}")
+
+def _format_size(size_bytes: int) -> str:
+    """Format file size"""
+    if size_bytes == 0:
+        return "0 B"
+
+    for unit in ['B', 'KB', 'MB', 'GB']:
+        if size_bytes < 1024.0:
+            return f"{size_bytes:.2f} {unit}"
+        size_bytes /= 1024.0
+    return f"{size_bytes:.2f} TB"
+
+def _format_time(timestamp: float) -> str:
+    """Format timestamp"""
+    if timestamp == 0:
+        return "-"
+
+    try:
+        from datetime import datetime
+        return datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S")
+    except:
+        return "-"
@@ -16,6 +16,7 @@ class Injector:
         self.ingress_yaml_path = project_dir / "ingress.yaml"
         self.dockerfile_path = project_dir / "Dockerfile"
         self.pyproject_toml_path = project_dir / "pyproject.toml"
+        self.start_sh_path = project_dir / "start.sh"
         self.metadata = load_metadata(self.metadata_path)
         self.name = self.metadata.name
         self.version = self.metadata.version
@@ -121,9 +122,25 @@ class Injector:
             f.write(pyproject_toml)
         print("pyproject_toml_path: ", self.pyproject_toml_path)
 
+    def clear_start_sh(self) -> None:
+        if Path(self.start_sh_path).exists():
+            with open(self.start_sh_path, "rb") as f:
+                content = f.read()
+            content_str = content.decode("utf-8")
+            lines = content_str.splitlines()
+            new_content = "\n".join(lines) + ("\n" if content_str.endswith(('\n', '\r')) else "")
+            with open(self.start_sh_path, "w", encoding="utf-8", newline="\n") as f:
+                f.write(new_content)
+
     def inject(self) -> None:
-        self.inject_deployment()
-        self.inject_service_config()
-        self.inject_ingress()
-        self.inject_dockerfile()
-        self.inject_pyproject_toml()
+        if self.metadata.inject.deployment:
+            self.inject_deployment()
+        if self.metadata.inject.service_config:
+            self.inject_service_config()
+        if self.metadata.inject.ingress:
+            self.inject_ingress()
+        if self.metadata.inject.dockerfile:
+            self.inject_dockerfile()
+        if self.metadata.inject.pyproject_toml:
+            self.inject_pyproject_toml()
+        self.clear_start_sh()
@@ -128,4 +128,4 @@ DEFAULT_PYPROJECT_TOML = """
 
 [tool.uv.sources]
 service-forge = { workspace = true }
-"""
+"""
@@ -1,30 +1,29 @@
-from omegaconf import OmegaConf
+from __future__ import annotations
+import yaml
+from pydantic import BaseModel
 
-class SfMetadata:
-    def __init__(
-        self,
-        name: str,
-        version: str,
-        description: str,
-        service_config: str,
-        config_only: bool,
-        env: list[dict],
-    ) -> None:
-        self.name = name
-        self.version = version
-        self.description = description
-        self.service_config = service_config
-        self.config_only = config_only
-        self.env = env
+class SfMetadataInject(BaseModel):
+    deployment: bool = True
+    service_config: bool = True
+    ingress: bool = True
+    dockerfile: bool = True
+    pyproject_toml: bool = True
+
+class SfMetadata(BaseModel):
+    name: str
+    version: str
+    description: str
+    service_config: str
+    config_only: bool
+    env: list[dict]
+    inject: SfMetadataInject = SfMetadataInject()
+    enable_auth_middleware: bool = True
+
+    @classmethod
+    def from_yaml_file(cls, filepath: str) -> SfMetadata:
+        with open(filepath, 'r', encoding='utf-8') as f:
+            data = yaml.safe_load(f)
+        return cls(**data)
 
 def load_metadata(path: str) -> SfMetadata:
-    with open(path, 'r') as file:
-        data = OmegaConf.load(file)
-    return SfMetadata(
-        name=data.get('name'),
-        version=data.get('version'),
-        description=data.get('description'),
-        service_config=data.get('service_config'),
-        config_only=data.get('config_only'),
-        env=data.get('env', []),
-    )
+    return SfMetadata.from_yaml_file(path)
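SfMetadata is now a pydantic model loaded with yaml.safe_load, so sf-meta.yaml keys map directly onto the fields above, including the new inject and enable_auth_middleware options. A sketch of constructing it in code; only the field names are taken from the diff, the values are placeholders:

from service_forge.sft.config.sf_metadata import SfMetadata, SfMetadataInject

meta = SfMetadata(
    name="example-service",                  # placeholder values throughout
    version="0.1.0",
    description="example",
    service_config="service_config.yaml",
    config_only=False,
    env=[],
    inject=SfMetadataInject(ingress=False),  # skip ingress injection, keep the rest
    enable_auth_middleware=True,
)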
@@ -2,7 +2,6 @@ import typer
 from pathlib import Path
 from typing import Callable, TypeVar, Any
 from service_forge.sft.util.logger import log_error, log_info
-from service_forge.sft.config.sf_metadata import load_metadata, SfMetadata
 
 T = TypeVar('T')
 
@@ -2,8 +2,8 @@ from ..utils.type_converter import TypeConverter
 from ..workflow.workflow import Workflow
 from ..api.http_api import fastapi_app
 from ..api.kafka_api import KafkaApp, kafka_app
-from fastapi import FastAPI
 from ..workflow.workflow_type import WorkflowType, workflow_type_register
+from fastapi import FastAPI
 
 type_converter = TypeConverter()
 type_converter.register(str, Workflow, lambda s, node: node.sub_workflows.get_workflow(s))
@@ -1,6 +1,8 @@
 from typing import Any, Callable, Type, Dict, Tuple, Set, List
 from collections import deque
 import inspect
+import traceback
+from pydantic import BaseModel
 from typing_extensions import get_origin, get_args
 
 def is_type(value, dst_type):
@@ -57,6 +59,9 @@ class TypeConverter:
         except Exception:
             pass
 
+        if issubclass(dst_type, BaseModel) and isinstance(value, dict):
+            return dst_type(**value)
+
         path = self._find_path(src_type, dst_type)
         if not path:
             raise TypeError(f"No conversion path found from {src_type.__name__} to {dst_type.__name__}.")
@@ -69,6 +69,7 @@ def workflow_clone(self: Workflow, task_id: uuid.UUID, trigger_node: Trigger) ->
         callbacks=self.callbacks,
         task_id=task_id,
         real_trigger_node=trigger_node,
+        global_context=self.global_context,
     )
 
     for node in workflow.nodes:
@@ -10,6 +10,7 @@ from .context import Context
 from ..utils.register import Register
 from ..db.database import DatabaseManager, PostgresDatabase, MongoDatabase, RedisDatabase
 from ..utils.workflow_clone import node_clone
+from .workflow_callback import CallbackEvent
 
 if TYPE_CHECKING:
     from .workflow import Workflow
@@ -62,6 +63,10 @@ class Node(ABC):
     def database_manager(self) -> DatabaseManager:
         return self.workflow.database_manager
 
+    @property
+    def global_context(self) -> Context:
+        return self.workflow.global_context
+
     def backup(self) -> None:
         # do NOT use deepcopy here
         # self.bak_context = deepcopy(self.context)
@@ -181,4 +186,7 @@ class Node(ABC):
     def _clone(self, context: Context) -> Node:
         return node_clone(self, context)
 
+    async def stream_output(self, data: Any) -> None:
+        await self.workflow.call_callbacks(CallbackEvent.ON_NODE_STREAM_OUTPUT, node=self, output=data)
+
 node_register = Register[Node]()
@@ -26,6 +26,7 @@ class FastAPITrigger(Trigger):
     DEFAULT_OUTPUT_PORTS = [
         Port("trigger", bool),
        Port("user_id", int),
+        Port("token", str),
         Port("data", Any),
     ]
 
@@ -72,6 +73,7 @@ class FastAPITrigger(Trigger):
         self.trigger_queue.put_nowait({
             "id": task_id,
             "user_id": getattr(request.state, "user_id", None),
+            "token": getattr(request.state, "auth_token", None),
             "data": converted_data,
         })
 
@@ -169,6 +171,7 @@ class FastAPITrigger(Trigger):
             try:
                 trigger = await self.trigger_queue.get()
                 self.prepare_output_edges(self.get_output_port_by_name('user_id'), trigger['user_id'])
+                self.prepare_output_edges(self.get_output_port_by_name('token'), trigger['token'])
                 self.prepare_output_edges(self.get_output_port_by_name('data'), trigger['data'])
                 yield self.trigger(trigger['id'])
             except Exception as e:
@@ -9,6 +9,7 @@ from fastapi import FastAPI, WebSocket, WebSocketDisconnect
 from service_forge.workflow.port import Port
 from google.protobuf.message import Message
 from google.protobuf.json_format import MessageToJson
+from service_forge.api.http_api import authenticate_websocket
 
 class WebSocketAPITrigger(Trigger):
     DEFAULT_INPUT_PORTS = [
@@ -19,6 +20,8 @@ class WebSocketAPITrigger(Trigger):
 
     DEFAULT_OUTPUT_PORTS = [
         Port("trigger", bool),
+        Port("user_id", int),
+        Port("token", str),
         Port("data", Any),
     ]
 
@@ -36,6 +39,20 @@ class WebSocketAPITrigger(Trigger):
         )
         return result
 
+    async def send_message(
+        self,
+        websocket: WebSocket,
+        type: str,
+        task_id: uuid.UUID,
+        data: Any,
+    ):
+        message = {
+            "type": type,
+            "task_id": str(task_id),
+            "data": data
+        }
+        await websocket.send_text(json.dumps(message))
+
     async def handle_stream_output(
         self,
         websocket: WebSocket,
@@ -46,12 +63,7 @@ class WebSocketAPITrigger(Trigger):
                 item = await self.stream_queues[task_id].get()
 
                 if item.is_error:
-                    error_response = {
-                        "type": "stream_error",
-                        "task_id": str(task_id),
-                        "detail": str(item.result)
-                    }
-                    await websocket.send_text(json.dumps(error_response))
+                    await self.send_message(websocket, "stream_error", task_id, str(item.result))
                     break
 
                 if item.is_end:
@@ -65,18 +77,9 @@ class WebSocketAPITrigger(Trigger):
                             data = serialized
                         else:
                             data = serialized
-
-                        end_response = {
-                            "type": "stream_end",
-                            "task_id": str(task_id),
-                            "data": data
-                        }
+                        await self.send_message(websocket, "stream_end", task_id, data)
                     else:
-                        end_response = {
-                            "type": "stream_end",
-                            "task_id": str(task_id)
-                        }
-                        await websocket.send_text(json.dumps(end_response))
+                        await self.send_message(websocket, "stream_end", task_id, None)
                     break
 
                 # Send stream data
@@ -89,23 +92,10 @@ class WebSocketAPITrigger(Trigger):
                 else:
                     data = serialized
 
-                stream_response = {
-                    "type": "stream",
-                    "task_id": str(task_id),
-                    "data": data
-                }
-                await websocket.send_text(json.dumps(stream_response))
+                await self.send_message(websocket, "stream", task_id, data)
         except Exception as e:
             logger.error(f"Error handling stream output for task {task_id}: {e}")
-            error_response = {
-                "type": "stream_error",
-                "task_id": str(task_id),
-                "detail": str(e)
-            }
-            try:
-                await websocket.send_text(json.dumps(error_response))
-            except Exception:
-                pass
+            await self.send_message(websocket, "stream_error", task_id, str(e))
         finally:
             if task_id in self.stream_queues:
                 del self.stream_queues[task_id]
@@ -120,10 +110,13 @@ class WebSocketAPITrigger(Trigger):
         self.result_queues[task_id] = asyncio.Queue()
         self.stream_queues[task_id] = asyncio.Queue()
 
+        logger.info(f'user_id {getattr(websocket.state, "user_id", None)} token {getattr(websocket.state, "auth_token", None)}')
+
         if data_type is Any:
             converted_data = message_data
         else:
             try:
+                # TODO: message_data is Message, need to convert to dict
                 converted_data = data_type(**message_data)
             except Exception as e:
                 error_msg = {"error": f"Failed to convert data: {str(e)}"}
@@ -135,6 +128,8 @@ class WebSocketAPITrigger(Trigger):
 
         self.trigger_queue.put_nowait({
             "id": task_id,
+            "user_id": getattr(websocket.state, "user_id", None),
+            "token": getattr(websocket.state, "auth_token", None),
             "data": converted_data,
         })
 
@@ -142,20 +137,30 @@ class WebSocketAPITrigger(Trigger):
 
     def _setup_websocket(self, app: FastAPI, path: str, data_type: type) -> None:
         async def websocket_handler(websocket: WebSocket):
+            # Authenticate WebSocket connection before accepting
+            # Get trusted_domain from app.state if available
+            trusted_domain = getattr(app.state, "trusted_domain", "ring.shiweinan.com")
+            enable_auth = getattr(app.state, "enable_auth_middleware", True)
+
+            if enable_auth:
+                await authenticate_websocket(websocket, trusted_domain)
+            else:
+                # If auth is disabled, set default values
+                websocket.state.user_id = websocket.headers.get("X-User-ID", "0")
+                websocket.state.auth_token = websocket.headers.get("X-User-Token")
+
             await websocket.accept()
 
             try:
                 while True:
-                    # Receive message from client
-                    data = await websocket.receive_text()
+                    data = await websocket.receive()
                     try:
-                        message = json.loads(data)
-
+                        # message = json.loads(data)
                         # Handle the message and trigger workflow
                         await self.handle_websocket_message(
                             websocket,
                             data_type,
-                            message
+                            data
                         )
                     except json.JSONDecodeError:
                         error_msg = {"error": "Invalid JSON format"}
@@ -179,6 +184,8 @@ class WebSocketAPITrigger(Trigger):
         while True:
             try:
                 trigger = await self.trigger_queue.get()
+                self.prepare_output_edges(self.get_output_port_by_name('user_id'), trigger['user_id'])
+                self.prepare_output_edges(self.get_output_port_by_name('token'), trigger['token'])
                 self.prepare_output_edges(self.get_output_port_by_name('data'), trigger['data'])
                 yield self.trigger(trigger['id'])
             except Exception as e:
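With send_message in place, every frame the trigger pushes to the client is a JSON object carrying type, task_id, and data, where type is one of "stream", "stream_end", or "stream_error". A hedged sketch of client-side handling; raw_message stands in for text received over the WebSocket:

import json

def handle_frame(raw_message: str) -> None:
    frame = json.loads(raw_message)
    kind, task_id, data = frame["type"], frame["task_id"], frame.get("data")
    if kind == "stream":
        print(f"[{task_id}] chunk: {data}")
    elif kind == "stream_end":
        print(f"[{task_id}] finished: {data}")
    elif kind == "stream_error":
        print(f"[{task_id}] failed: {data}")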
@@ -13,6 +13,7 @@ from ..db.database import DatabaseManager
 from ..utils.workflow_clone import workflow_clone
 from .workflow_callback import WorkflowCallback, BuiltinWorkflowCallback, CallbackEvent
 from .workflow_config import WorkflowConfig
+from .context import Context
 
 class Workflow:
     def __init__(
@@ -31,6 +32,9 @@ class Workflow:
         # for run
         task_id: uuid.UUID = None,
         real_trigger_node: Trigger = None,
+
+        # global variables
+        global_context: Context = None,
     ) -> None:
         self.id = id
         self.config = config
@@ -48,6 +52,7 @@ class Workflow:
         self.callbacks = callbacks
         self.task_id = task_id
         self.real_trigger_node = real_trigger_node
+        self.global_context = global_context
         self._validate()
 
     @property
@@ -74,6 +79,8 @@ class Workflow:
                 await callback.on_workflow_start(*args, **kwargs)
             elif callback_type == CallbackEvent.ON_WORKFLOW_END:
                 await callback.on_workflow_end(*args, **kwargs)
+            elif callback_type == CallbackEvent.ON_WORKFLOW_ERROR:
+                await callback.on_workflow_error(*args, **kwargs)
             elif callback_type == CallbackEvent.ON_NODE_START:
                 await callback.on_node_start(*args, **kwargs)
             elif callback_type == CallbackEvent.ON_NODE_END:
@@ -117,7 +124,7 @@ class Workflow:
             raise ValueError("Multiple trigger nodes found in workflow.")
         return trigger_nodes[0]
 
-    async def _run_node_with_callbacks(self, node: Node) -> None:
+    async def _run_node_with_callbacks(self, node: Node) -> bool:
         await self.call_callbacks(CallbackEvent.ON_NODE_START, node=node)
 
         try:
@@ -126,8 +133,13 @@ class Workflow:
                 await self.handle_node_stream_output(node, result)
             elif asyncio.iscoroutine(result):
                 await result
+        except Exception as e:
+            await self.call_callbacks(CallbackEvent.ON_WORKFLOW_ERROR, workflow=self, node=node, error=e)
+            logger.error(f"Error when running node {node.name}: {str(e)}, task_id: {self.task_id}")
+            return False
         finally:
             await self.call_callbacks(CallbackEvent.ON_NODE_END, node=node)
+        return True
 
     async def run_after_trigger(self) -> Any:
         logger.info(f"Running workflow: {self.name}")
@@ -138,30 +150,41 @@ class Workflow:
         for edge in self.get_trigger_node().output_edges:
             edge.end_port.trigger()
 
-        try:
-            for input_port in self.input_ports:
-                if input_port.value is not None:
-                    input_port.port.node.fill_input(input_port.port, input_port.value)
-
-            for node in self.nodes:
-                for key in node.AUTO_FILL_INPUT_PORTS:
-                    if key[0] not in [edge.end_port.name for edge in node.input_edges]:
-                        node.fill_input_by_name(key[0], key[1])
-
-            while self.ready_nodes:
-                nodes = self.ready_nodes.copy()
-                self.ready_nodes = []
-
-                tasks = []
-                for node in nodes:
-                    tasks.append(asyncio.create_task(self._run_node_with_callbacks(node)))
-
-                await asyncio.gather(*tasks)
-
-        except Exception as e:
-            error_msg = f"Error in run_after_trigger: {str(e)}"
-            logger.error(error_msg)
-            raise e
+        for input_port in self.input_ports:
+            if input_port.value is not None:
+                input_port.port.node.fill_input(input_port.port, input_port.value)
+
+        for node in self.nodes:
+            for key in node.AUTO_FILL_INPUT_PORTS:
+                if key[0] not in [edge.end_port.name for edge in node.input_edges]:
+                    node.fill_input_by_name(key[0], key[1])
+
+        while self.ready_nodes:
+            nodes = self.ready_nodes.copy()
+            self.ready_nodes = []
+
+            tasks = []
+            for node in nodes:
+                tasks.append(asyncio.create_task(self._run_node_with_callbacks(node)))
+
+            results = await asyncio.gather(*tasks, return_exceptions=True)
+
+            for i, result in enumerate(results):
+                if isinstance(result, Exception):
+                    for task in tasks:
+                        if not task.done():
+                            task.cancel()
+                    await asyncio.gather(*tasks, return_exceptions=True)
+                    return
+                    # raise result
+                elif result is False:
+                    logger.error(f"Node execution failed, stopping workflow: {nodes[i].name}")
+                    for task in tasks:
+                        if not task.done():
+                            task.cancel()
+                    await asyncio.gather(*tasks, return_exceptions=True)
+                    return
+                    # raise RuntimeError(f"Workflow stopped due to node execution failure: {nodes[i].name}")
 
         if len(self.output_ports) > 0:
             if len(self.output_ports) == 1:
@@ -186,8 +209,11 @@ class Workflow:
                 # TODO: clear new_workflow
 
             except Exception as e:
-                error_msg = f"Error running workflow: {str(e)}, {traceback.format_exc()}"
-                logger.error(error_msg)
+                await self.call_callbacks(CallbackEvent.ON_WORKFLOW_ERROR, workflow=self, node=None, error=e)
+                # error_msg = f"Error running workflow: {str(e)}, {traceback.format_exc()}"
+                # logger.error(error_msg)
+                # await self.call_callbacks(CallbackEvent.ON_WORKFLOW_END, workflow=self, node=None, error=e)
+                return
 
     async def run(self):
         tasks = []
@@ -31,7 +31,7 @@ class WorkflowCallback:
         pass
 
     @abstractmethod
-    async def on_workflow_error(self, workflow: Workflow, error: Any) -> None:
+    async def on_workflow_error(self, workflow: Workflow, node: Node, error: Any) -> None:
         pass
 
     @abstractmethod
@@ -90,7 +90,7 @@ class BuiltinWorkflowCallback(WorkflowCallback):
             logger.error(f"Failed to send workflow_end message to websocket: {e}")
 
     @override
-    async def on_workflow_error(self, workflow: Workflow, error: Any) -> None:
+    async def on_workflow_error(self, workflow: Workflow, node: Node | None, error: Any) -> None:
         workflow_result = WorkflowResult(result=error, is_end=False, is_error=True)
 
         if workflow.task_id in workflow.real_trigger_node.result_queues:
@@ -103,6 +103,7 @@ class BuiltinWorkflowCallback(WorkflowCallback):
         message = {
             "type": "workflow_error",
            "task_id": str(workflow.task_id),
+            "node": node.name if node else None,
             "error": self._serialize_result(error),
             "is_end": False,
             "is_error": True
@@ -71,6 +71,7 @@ def create_workflow(
         database_manager = database_manager,
         # TODO: max_concurrent_runs
         callbacks = [BuiltinWorkflowCallback()],
+        global_context = Context(variables={}),
     )
 
     nodes: dict[str, Node] = {}
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: service-forge
-Version: 0.1.18
+Version: 0.1.28
 Summary: Add your description here
 Author-email: euxcet <zcc.qwer@gmail.com>
 Requires-Python: >=3.11
@@ -1,18 +1,18 @@
 service_forge/current_service.py,sha256=0YKm7nQiXzUUAc1ToCcbG1QPJfOSNKcOHUpyJ4E3xrY,342
-service_forge/service.py,sha256=rZZ7-BXTTCrc6KqnONnKpg-bRs5JCfDTvMrl4K26vbE,12633
+service_forge/service.py,sha256=lHsZraEH6Ze20QO5v6iW1MO8src8T8JKLFRBXtd0LD0,12933
 service_forge/service_config.py,sha256=zsTdCZ1peMAotjGEVypPos7d-gjwrYoB9x_12g95G4g,1242
 service_forge/api/deprecated_websocket_api.py,sha256=E36-fpUPxzMJ2YGlCPeqwRbryk2FMMbQD_pbb8k1FYI,3343
 service_forge/api/deprecated_websocket_manager.py,sha256=Xiwg3zwXRVi63sXmVH-TgbpL2XH_djyLeo96STm4cNM,16757
-service_forge/api/http_api.py,sha256=XnEQ45zuWQV1zrSL9vd3USUh47ymkVc4w_1SoFlNdl8,5648
+service_forge/api/http_api.py,sha256=8vcN7oyxwTwu_w1f-Egh2XrFWsm-ci5_gk7KsQiP1iM,7673
 service_forge/api/http_api_doc.py,sha256=ASlxvsIiUzDcMhVoumRjt9CfEMbh0O1U4ZLC9eobLF8,20235
 service_forge/api/kafka_api.py,sha256=PInx2ZzKJRON7EaJFWroXkiOt_UeZY7WE6qK03gq4ak,4599
 service_forge/api/task_manager.py,sha256=9Lk-NV4cBnuv9b8V6GVLWJJ4MCiAwCp5TVAwmYgqXbs,5269
 service_forge/api/routers/feedback/feedback_router.py,sha256=JOJI6kaQYapg4__iA6Eo26_9su48p7R2Kpn422nbsxw,5640
 service_forge/api/routers/service/service_router.py,sha256=hGOT-ScnXR7agHp-F9OFGWiPFjG9f3gl7NBsnayW3JI,5088
 service_forge/api/routers/websocket/websocket_manager.py,sha256=j1AFqzXQhZZyaLQwhvZefXAS-zCOPzLcRMDEuusv6V0,3605
-service_forge/api/routers/websocket/websocket_router.py,sha256=V0B7eQP8toO94-WbTrGraadXD3qeZ9lnKFcxwx6kLgM,3777
+service_forge/api/routers/websocket/websocket_router.py,sha256=sPDJriEpD2mqu4508cOaWK7u040sgOdaUFlyiBqCSgc,4447
 service_forge/db/__init__.py,sha256=EWLhH8bYsMOvRF_YXF6FgL3irKA6GZeLxSGvWDRM6f8,85
-service_forge/db/database.py,sha256=IdF7RV-bOFmPPu7d4sQFtsF3e8mZKwMXUkMi7HPEPnc,9329
+service_forge/db/database.py,sha256=WKtZ0MoOnbMw54ohfs9zKsrOZ5_qenLvXkAV_Gr2WOs,10068
 service_forge/db/migrations/feedback_migration.py,sha256=-zQ71TsOlWmQPQo1NKSIu3C1T47v3cfD6IAQ5HE_ffk,4845
 service_forge/db/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 service_forge/db/models/feedback.py,sha256=gltX3y-nNhXSR9z1cd82Vg-zwjF0JhnGbOvUapkcWKQ,1253
@@ -22,42 +22,44 @@ service_forge/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 service_forge/model/feedback.py,sha256=Is5tkplzYkjChGb67o-Qjtbu4kSspVuaKi4Aua_QdRo,1318
 service_forge/model/websocket.py,sha256=YIUCW32sbHIEFPHjk5FiDM_rDe2aVD6OpzBQul2R5IM,267
 service_forge/proto/foo_input.py,sha256=-POJZSIFrGdBGz7FqZZ03r5uztpc5Apin9A0Yxbk6YI,90
-service_forge/sft/cli.py,sha256=stB_YPhZ7gAQeOxIq03-tLyl5VfU-gnRacAT05GSMis,2904
+service_forge/sft/cli.py,sha256=xcM6kiGPJeHEUqfJd4uFngVFpjfVkDxW4G_PbNRD9Xs,4265
 service_forge/sft/cmd/config_command.py,sha256=I9t2HG28S6lCXpExHyZUc47b_1yB3i51tCFVk5J6TTU,2382
 service_forge/sft/cmd/deploy_service.py,sha256=5IYbCVI0Nlry1KXBhm9leJmr2bzUEXrSY-2BympLR0c,4686
 service_forge/sft/cmd/list_tars.py,sha256=Z3zvu2JLb_wNbTwi5TZXL5cZ8PxYrKks9AxkOzoUd_Q,1380
+service_forge/sft/cmd/remote_deploy.py,sha256=AStAlbqGD7XeZFhL0fx2j12YWP_MVbdURbO5ZENEMgc,6510
+service_forge/sft/cmd/remote_list_tars.py,sha256=mx6hkNnu0ySMyBX2Qi6blKMj5xnNnrmXq3VD_nERlmw,4176
 service_forge/sft/cmd/service_command.py,sha256=69GMMN61KtuoEFuYzFJ74ivNt8RX8q0I6rbePfJfEwQ,5538
 service_forge/sft/cmd/upload_service.py,sha256=86PvvJSXCZKH4BU6rLytuc45grX-sRnQnOHCo9zUaPY,1232
-service_forge/sft/config/injector.py,sha256=El8U5USveKfC0-jhhqYgaevjp4R4fwW02oMvDn7Amyk,5762
-service_forge/sft/config/injector_default_files.py,sha256=aTMQ2Tla3wdpvdaD_5VP2X6oeZbI0X414FM9NbirnO4,2716
-service_forge/sft/config/sf_metadata.py,sha256=Y9akhSCgOd11-oqRs3LIs8FL9pvWNw6hyy57fuFcBhc,866
+service_forge/sft/config/injector.py,sha256=V79AW1W_LyU-Hn2QgJlLyTt8tdI3J1t1jS3wRoXfbSo,6581
+service_forge/sft/config/injector_default_files.py,sha256=f7mNJ5Y9yb4e9kjLn414WiQoZrOue9ok_hq_POG4I2o,2717
+service_forge/sft/config/sf_metadata.py,sha256=RruOe3_6JdxLnzMbmdnOcncxwnmATR-1q3Cn8R9d5eE,782
 service_forge/sft/config/sft_config.py,sha256=MgurtgbcSmyXbGlVX3NG84KD4Hst1gZWHdF9a8zi-6U,7707
 service_forge/sft/file/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 service_forge/sft/file/ignore_pattern.py,sha256=UrVmR83wOx51XHFcZDTPp15dGYcvMTE5W1m07-SvHpw,2521
 service_forge/sft/file/sft_file_manager.py,sha256=poIM77tZZg7vfwBdCsdQctBbCczVLQePdTwVINEABvE,4337
 service_forge/sft/kubernetes/kubernetes_manager.py,sha256=IF2_X9U-k5Dx7EZuGrJ9lZ85ltbilrrZDfsl8qFyTu4,11339
-service_forge/sft/util/assert_util.py,sha256=8HreVkOzs9_ClKiFqG4qsFn_yyDLo5uXYhYUPXlmDjM,828
+service_forge/sft/util/assert_util.py,sha256=6XVTsXKxg92ww3heWzuMvTbybGuw1cmTqWqfiSbPAcY,753
 service_forge/sft/util/logger.py,sha256=0Hi74IoxshE-wBgvBa2EZPXYj37tTrUYwlOBd9UMMMs,502
 service_forge/sft/util/name_util.py,sha256=WSYHM6c7SZULXCFON7nmGqsvAPPs_wavd6QjCa4UbRQ,301
 service_forge/sft/util/yaml_utils.py,sha256=9OhJNQlzj_C1NeQoUZVF8qpDovrE7RDWtNXe-H7tuNA,1703
 service_forge/storage/__init__.py,sha256=8Jg4R9z2JHadheV1YrHtCsFxEL5aCl9n2dMQGHcJfvM,156
 service_forge/storage/feedback_storage.py,sha256=wnuNTmEzpnS7iisiU9MrEJIgVa2G_HysqICWk_PxzfU,9124
 service_forge/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-service_forge/utils/default_type_converter.py,sha256=CuUZpMATdTwgcV1M3lbK64znwmEG85Zt3y_QGXr9tYQ,625
+service_forge/utils/default_type_converter.py,sha256=KFWhlZJUrQc2e-Wm9-WfPUKp5UaI-fpVjzNLO6n37e8,625
 service_forge/utils/register.py,sha256=nxiGQBCX238FoZZhsDoDdBMv_2QzeIZpM367HPNfaqM,874
-service_forge/utils/type_converter.py,sha256=IRphYxyGA0ICwFvDMvqAnRnSUTpY2ZQXvTE5o99CKpo,3046
-service_forge/utils/workflow_clone.py,sha256=I3qNmJLbhAbKb-5VITls89pMmGcWNtI64IOTxN-PUwY,4549
+service_forge/utils/type_converter.py,sha256=eGAAnqEr-va1PBwYOzuzzzExVJNtusn-yGRv7i9WjRY,3204
+service_forge/utils/workflow_clone.py,sha256=K2Y4XXsGYQn4OTKcDYEa7UZHGXta_hztKW-pr4EYaDQ,4593
 service_forge/workflow/__init__.py,sha256=9oh4qPyA33RugrUYRAlLmFtmQPUN2wxruFQE3omTJF8,49
 service_forge/workflow/context.py,sha256=1PotSEN_l8Emd5p8_6mtXJngXGYd3NSbOs1EKHgvnlo,346
 service_forge/workflow/edge.py,sha256=88Ex-9_dHAGD38OHgiqP0DrfxK0FrhvDAxThR3ilUi4,627
-service_forge/workflow/node.py,sha256=jPzXuixmZBL6-_jjXikFZmierlu3SOBJOSdQxTyV0FY,7536
+service_forge/workflow/node.py,sha256=hoO8TdfbB5inpu55YCwecnasi4RS-Bg9R8Sp-M5c2Ys,7841
 service_forge/workflow/port.py,sha256=JVj0JrnQeOWCsp7n48Cm03bfmO8r3V5oTSEsC-HTGPE,2967
 service_forge/workflow/trigger.py,sha256=2OqiHi0dFcoC8g5GDqVpVEpHKlmqtDADb74Z7PRzHlo,879
-service_forge/workflow/workflow.py,sha256=ykPZBN0sLrORnM83AErcLmFTdUjF5gvbNuMAnY_-p00,8332
-service_forge/workflow/workflow_callback.py,sha256=S__F7s-7l5LgkIXcZMcG68qCyc8NgdWQX81F0hKWL1U,5135
+service_forge/workflow/workflow.py,sha256=YsA_Yeh5XjYQflztqKWoWBt9euEoHVOJMGMTquIoO04,9788
+service_forge/workflow/workflow_callback.py,sha256=MJBG_DTQGCgqCjpnBhuSteZmOxitYRdtkxXlFCOh930,5219
 service_forge/workflow/workflow_config.py,sha256=Yih10b-utKIpaR-X-nfy7fPnmBNhRvlD8Bw2_mQ5lJI,1821
 service_forge/workflow/workflow_event.py,sha256=QG1VFJwUUF1bTKKPKvqBICnYxkBwpfYDEoAuxwQYhhE,371
-service_forge/workflow/workflow_factory.py,sha256=AV39dK2QmG1ed9Mm3BXGI3s-w-o3PuMnwU-Y6VyAVFk,9885
+service_forge/workflow/workflow_factory.py,sha256=KfIxjdQhsRC0KYrEkAhqlx3oY6tABoulQGhBwBBXLq0,9933
 service_forge/workflow/workflow_group.py,sha256=c-Hcfb-nVKxCVddoEQNFV6nY4LVAku0iq7tTUef9fDM,2076
 service_forge/workflow/workflow_type.py,sha256=zRc-gL2LBE-gOgTUCU5-VDWeGUzuQahkHIg98ipEvQg,1629
 service_forge/workflow/nodes/__init__.py,sha256=AUOoFUAMgRwfLiHNkjnDnWToMSe2AeV5vJO3NCG1eLw,381
@@ -72,12 +74,12 @@ service_forge/workflow/nodes/test/if_console_input_node.py,sha256=CtKHkFqr8PN974
 service_forge/workflow/nodes/test/time_consuming_node.py,sha256=gB2qw2DdjRf82z1158u36nSnCHrheHaxscAzPRnXNyk,1813
 service_forge/workflow/triggers/__init__.py,sha256=iQ0WEYu6JgL191Y9XslMhZ7jS7JO8bL3SZ9YqIw5LCM,269
 service_forge/workflow/triggers/a2a_api_trigger.py,sha256=Oaw3vRLA8fWZUIQ-h33dYmojmjp4mwNF_0LHqQ_4mZQ,8583
-service_forge/workflow/triggers/fast_api_trigger.py,sha256=8BF0A8gdcKeiP3cyF_dF0T3MH7bXnnZRCa_h5hx9kQ4,7513
+service_forge/workflow/triggers/fast_api_trigger.py,sha256=bAtnuNkUcB5rApXj7x3oBscdavUnDGTb7lE9OpmtauE,7705
 service_forge/workflow/triggers/kafka_api_trigger.py,sha256=Zv8J75Rmg1-xqxHwpBMBhsm_TWX8p3_rqldk2RVSwVc,1561
 service_forge/workflow/triggers/once_trigger.py,sha256=YmzSQBoKE-8liNFIoDCqi2UdqhHujizsXVDft81_8jA,572
 service_forge/workflow/triggers/period_trigger.py,sha256=JFX3yBjKqoRP55jiulaSG_SPO-zWLMcwEb1BwcKsWUM,767
-service_forge/workflow/triggers/websocket_api_trigger.py,sha256=DRVFVdR3NgUx8S6Rlv492_3xrmlNGDOcvUrz13jHQ7o,6902
-service_forge-0.1.18.dist-info/METADATA,sha256=dP-GKlnX6P32jvP-1oudgM_Rj760sCCf6fayQf9FvuY,2308
-service_forge-0.1.18.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-service_forge-0.1.18.dist-info/entry_points.txt,sha256=WHntHW7GAyKQUEeMcMvHDZ7_xAb0-cZeAK4iJeu9lm8,51
-service_forge-0.1.18.dist-info/RECORD,,
+service_forge/workflow/triggers/websocket_api_trigger.py,sha256=gmO3ie2ctXOpZ3LIMXpp8AQ9I2Bo3l2IBkvxH9zFh5s,7588
+service_forge-0.1.28.dist-info/METADATA,sha256=8HUAOLGWFRFgOaztY6wsCa9kWD-v1aJlNYSAWBoBmGs,2308
+service_forge-0.1.28.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+service_forge-0.1.28.dist-info/entry_points.txt,sha256=WHntHW7GAyKQUEeMcMvHDZ7_xAb0-cZeAK4iJeu9lm8,51
+service_forge-0.1.28.dist-info/RECORD,,