service-forge 0.1.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of service-forge might be problematic. Click here for more details.
- service_forge/api/deprecated_websocket_api.py +86 -0
- service_forge/api/deprecated_websocket_manager.py +425 -0
- service_forge/api/http_api.py +152 -0
- service_forge/api/http_api_doc.py +455 -0
- service_forge/api/kafka_api.py +126 -0
- service_forge/api/routers/feedback/feedback_router.py +148 -0
- service_forge/api/routers/service/service_router.py +127 -0
- service_forge/api/routers/websocket/websocket_manager.py +83 -0
- service_forge/api/routers/websocket/websocket_router.py +78 -0
- service_forge/api/task_manager.py +141 -0
- service_forge/current_service.py +14 -0
- service_forge/db/__init__.py +1 -0
- service_forge/db/database.py +237 -0
- service_forge/db/migrations/feedback_migration.py +154 -0
- service_forge/db/models/__init__.py +0 -0
- service_forge/db/models/feedback.py +33 -0
- service_forge/llm/__init__.py +67 -0
- service_forge/llm/llm.py +56 -0
- service_forge/model/__init__.py +0 -0
- service_forge/model/feedback.py +30 -0
- service_forge/model/websocket.py +13 -0
- service_forge/proto/foo_input.py +5 -0
- service_forge/service.py +280 -0
- service_forge/service_config.py +44 -0
- service_forge/sft/cli.py +91 -0
- service_forge/sft/cmd/config_command.py +67 -0
- service_forge/sft/cmd/deploy_service.py +123 -0
- service_forge/sft/cmd/list_tars.py +41 -0
- service_forge/sft/cmd/service_command.py +149 -0
- service_forge/sft/cmd/upload_service.py +36 -0
- service_forge/sft/config/injector.py +129 -0
- service_forge/sft/config/injector_default_files.py +131 -0
- service_forge/sft/config/sf_metadata.py +30 -0
- service_forge/sft/config/sft_config.py +200 -0
- service_forge/sft/file/__init__.py +0 -0
- service_forge/sft/file/ignore_pattern.py +80 -0
- service_forge/sft/file/sft_file_manager.py +107 -0
- service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
- service_forge/sft/util/assert_util.py +25 -0
- service_forge/sft/util/logger.py +16 -0
- service_forge/sft/util/name_util.py +8 -0
- service_forge/sft/util/yaml_utils.py +57 -0
- service_forge/storage/__init__.py +5 -0
- service_forge/storage/feedback_storage.py +245 -0
- service_forge/utils/__init__.py +0 -0
- service_forge/utils/default_type_converter.py +12 -0
- service_forge/utils/register.py +39 -0
- service_forge/utils/type_converter.py +99 -0
- service_forge/utils/workflow_clone.py +124 -0
- service_forge/workflow/__init__.py +1 -0
- service_forge/workflow/context.py +14 -0
- service_forge/workflow/edge.py +24 -0
- service_forge/workflow/node.py +184 -0
- service_forge/workflow/nodes/__init__.py +8 -0
- service_forge/workflow/nodes/control/if_node.py +29 -0
- service_forge/workflow/nodes/control/switch_node.py +28 -0
- service_forge/workflow/nodes/input/console_input_node.py +26 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
- service_forge/workflow/nodes/nested/workflow_node.py +28 -0
- service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
- service_forge/workflow/nodes/output/print_node.py +29 -0
- service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
- service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
- service_forge/workflow/port.py +89 -0
- service_forge/workflow/trigger.py +28 -0
- service_forge/workflow/triggers/__init__.py +6 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +257 -0
- service_forge/workflow/triggers/fast_api_trigger.py +201 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +47 -0
- service_forge/workflow/triggers/once_trigger.py +23 -0
- service_forge/workflow/triggers/period_trigger.py +29 -0
- service_forge/workflow/triggers/websocket_api_trigger.py +189 -0
- service_forge/workflow/workflow.py +227 -0
- service_forge/workflow/workflow_callback.py +141 -0
- service_forge/workflow/workflow_config.py +66 -0
- service_forge/workflow/workflow_event.py +15 -0
- service_forge/workflow/workflow_factory.py +246 -0
- service_forge/workflow/workflow_group.py +51 -0
- service_forge/workflow/workflow_type.py +52 -0
- service_forge-0.1.18.dist-info/METADATA +98 -0
- service_forge-0.1.18.dist-info/RECORD +83 -0
- service_forge-0.1.18.dist-info/WHEEL +4 -0
- service_forge-0.1.18.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import redis
|
|
4
|
+
import pymongo
|
|
5
|
+
import psycopg2
|
|
6
|
+
from typing import AsyncGenerator
|
|
7
|
+
from loguru import logger
|
|
8
|
+
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
|
|
9
|
+
from service_forge.service_config import ServiceConfig
|
|
10
|
+
|
|
11
|
+
class PostgresDatabase:
    """Lazily-initialized async SQLAlchemy engine for one PostgreSQL database.

    A synchronous psycopg2 connectivity probe runs at construction time so
    misconfiguration surfaces early; the async engine itself is created on
    first use via ``init()``.
    """

    def __init__(
        self,
        name: str,
        postgres_user: str,
        postgres_password: str,
        postgres_host: str,
        postgres_port: int,
        postgres_db: str,
    ) -> None:
        self.name = name
        self.postgres_user = postgres_user
        self.postgres_password = postgres_password
        self.postgres_host = postgres_host
        self.postgres_port = postgres_port
        self.postgres_db = postgres_db
        # Async engine/session factory are created lazily by init().
        self.engine: AsyncEngine | None = None
        self.session_factory: async_sessionmaker[AsyncSession] | None = None
        # Eager probe; failure only logs a warning, it does not raise.
        self.test_connection()

    @property
    def database_url(self) -> str:
        """asyncpg connection URL for the configured database."""
        return f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}"

    @property
    def database_base_url(self) -> str:
        """Connection URL for the server's default 'postgres' database."""
        return f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@{self.postgres_host}:{self.postgres_port}/postgres"

    async def init(self) -> None:
        """Create the async engine and session factory (idempotent)."""
        if self.engine is None:
            self.engine = await self.create_engine()
            self.session_factory = async_sessionmaker(bind=self.engine, class_=AsyncSession, expire_on_commit=False)

    async def close(self) -> None:
        """Dispose of the engine and drop the session factory."""
        if self.engine:
            await self.engine.dispose()
            self.engine = None
            self.session_factory = None
            logger.info("Database connection closed")

    async def create_engine(self) -> AsyncEngine:
        """Validate configuration and build the async engine.

        Raises:
            ValueError: if a required connection setting is missing (the
                password is intentionally not checked, so empty passwords
                stay allowed).
        """
        if not all([self.postgres_user, self.postgres_host, self.postgres_port, self.postgres_db]):
            raise ValueError("Missing required database configuration. Please check your .env file or configuration.")
        # NOTE(review): this logs the full URL including the password —
        # consider redacting in production.
        logger.info(f"Creating database engine: {self.database_url}")
        return create_async_engine(self.database_url)

    async def get_async_session(self) -> AsyncGenerator[AsyncSession, None]:
        """Async generator yielding one session, rolling back on error.

        Bug fix: the original yielded the session a second time after the
        ``finally`` block had already closed it, handing callers a closed
        session on the next iteration; that stray ``yield`` is removed.
        """
        if self.session_factory is None:
            await self.init()

        if self.session_factory is None:
            raise RuntimeError("Session factory is not initialized")

        async with self.session_factory() as session:
            try:
                yield session
            except Exception:
                await session.rollback()
                raise
            finally:
                await session.close()

    async def get_session_factory(self) -> async_sessionmaker[AsyncSession]:
        """Return the session factory, initializing the engine if needed."""
        if self.engine is None:
            await self.init()

        if self.session_factory is None:
            raise RuntimeError("Session factory is not initialized")

        return self.session_factory

    def test_connection(self) -> bool:
        """Synchronously probe connectivity with psycopg2; never raises."""
        try:
            conn = psycopg2.connect(
                host=self.postgres_host,
                port=self.postgres_port,
                user=self.postgres_user,
                password=self.postgres_password,
                database=self.postgres_db,
                connect_timeout=5
            )
            conn.close()
            logger.info(f"PostgreSQL connection test successful for database '{self.name}'")
            return True
        except Exception as e:
            logger.warning(f"PostgreSQL connection test failed for database '{self.name}': {e}")
            return False
|
|
99
|
+
|
|
100
|
+
class MongoDatabase:
    """Wrapper holding a pymongo client plus its connection settings."""

    def __init__(
        self,
        name: str,
        mongo_host: str,
        mongo_port: int,
        mongo_user: str,
        mongo_password: str,
        mongo_db: str,
    ) -> None:
        self.name = name
        self.mongo_host = mongo_host
        self.mongo_port = mongo_port
        self.mongo_user = mongo_user
        self.mongo_password = mongo_password
        # An unset database name is normalized to the empty string.
        self.mongo_db = mongo_db or ""
        # Connect eagerly and immediately probe the server.
        self.client = pymongo.MongoClient(self.database_url)
        self.test_connection()

    @property
    def database_url(self) -> str:
        """mongodb:// URL built from the stored credentials."""
        return f"mongodb://{self.mongo_user}:{self.mongo_password}@{self.mongo_host}:{self.mongo_port}/{self.mongo_db}"

    def test_connection(self) -> bool:
        """Ping the server; log and report the outcome instead of raising."""
        try:
            self.client.admin.command('ping')
        except Exception as e:
            logger.error(f"MongoDB connection test failed for database '{self.name}': {e}")
            return False
        logger.info(f"MongoDB connection test successful for database '{self.name}'")
        return True
|
|
131
|
+
|
|
132
|
+
class RedisDatabase:
    """Wrapper holding a redis client plus its connection settings."""

    def __init__(self, name: str, redis_host: str, redis_port: int, redis_password: str) -> None:
        self.name = name
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_password = redis_password
        # Connect eagerly and immediately probe the server.
        self.client = redis.Redis(host=redis_host, port=redis_port, password=redis_password)
        self.test_connection()

    def test_connection(self) -> bool:
        """Ping the server; log and report the outcome instead of raising."""
        try:
            self.client.ping()
        except Exception as e:
            logger.error(f"Redis connection test failed for database '{self.name}': {e}")
            return False
        logger.info(f"Redis connection test successful for database '{self.name}'")
        return True
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
class DatabaseManager:
    """Registry of the service's configured databases, grouped by engine type."""

    def __init__(
        self,
        postgres_databases: list[PostgresDatabase],
        mongo_databases: list[MongoDatabase],
        redis_databases: list[RedisDatabase],
    ) -> None:
        self.postgres_databases = postgres_databases
        self.mongo_databases = mongo_databases
        self.redis_databases = redis_databases

    def get_database(self, name: str) -> PostgresDatabase | MongoDatabase | RedisDatabase | None:
        """Look up a database by name across all engine types.

        Bug fix: the original searched only the PostgreSQL list even though
        the signature advertises Mongo and Redis results too.
        """
        for database in self.postgres_databases + self.mongo_databases + self.redis_databases:
            if database.name == name:
                return database
        return None

    def get_default_postgres_database(self) -> PostgresDatabase | None:
        """Return the first configured PostgreSQL database, or None."""
        if len(self.postgres_databases) > 0:
            return self.postgres_databases[0]
        return None

    def get_default_mongo_database(self) -> MongoDatabase | None:
        """Return the first configured MongoDB database, or None."""
        if len(self.mongo_databases) > 0:
            return self.mongo_databases[0]
        return None

    def get_default_redis_database(self) -> RedisDatabase | None:
        """Return the first configured Redis database, or None."""
        if len(self.redis_databases) > 0:
            return self.redis_databases[0]
        return None

    @staticmethod
    def from_config(config_path: str | None = None, config: ServiceConfig | None = None) -> DatabaseManager:
        """Build a DatabaseManager from a ServiceConfig (or a YAML file path).

        Each entry in ``config.databases`` must set exactly one of
        ``postgres_host`` / ``mongo_host`` / ``redis_host``.

        Raises:
            ValueError: if an entry sets none, or more than one, host kind.
        """
        if config is None:
            config = ServiceConfig.from_yaml_file(config_path)

        postgres_databases: list[PostgresDatabase] = []
        mongo_databases: list[MongoDatabase] = []
        redis_databases: list[RedisDatabase] = []

        databases_config = config.databases

        if databases_config is not None:
            for database_config in databases_config:
                hosts_set = [
                    database_config.postgres_host is not None,
                    database_config.mongo_host is not None,
                    database_config.redis_host is not None,
                ]
                if not any(hosts_set):
                    raise ValueError(f"Database '{database_config.name}' is missing required configuration. Please check your service.yaml file.")

                if sum(hosts_set) > 1:
                    # Bug fix: the original used dict subscription
                    # (database_config['name']) on an attribute-style object,
                    # which would itself raise instead of this ValueError.
                    raise ValueError(f"Database '{database_config.name}' has multiple host configurations. Please check your service.yaml file.")

                if database_config.postgres_host is not None:
                    postgres_databases.append(PostgresDatabase(
                        name=database_config.name,
                        postgres_user=database_config.postgres_user,
                        postgres_password=database_config.postgres_password,
                        postgres_host=database_config.postgres_host,
                        postgres_port=database_config.postgres_port,
                        postgres_db=database_config.postgres_db,
                    ))
                elif database_config.mongo_host is not None:
                    mongo_databases.append(MongoDatabase(
                        name=database_config.name,
                        mongo_host=database_config.mongo_host,
                        mongo_port=database_config.mongo_port,
                        mongo_user=database_config.mongo_user,
                        mongo_password=database_config.mongo_password,
                        mongo_db=database_config.mongo_db,
                    ))
                elif database_config.redis_host is not None:
                    redis_databases.append(RedisDatabase(
                        name=database_config.name,
                        redis_host=database_config.redis_host,
                        redis_port=database_config.redis_port,
                        redis_password=database_config.redis_password,
                    ))

        return DatabaseManager(postgres_databases=postgres_databases, mongo_databases=mongo_databases, redis_databases=redis_databases)
|
|
235
|
+
|
|
236
|
+
def create_database_manager(config_path: str | None = None, config: ServiceConfig | None = None) -> DatabaseManager:
    """Module-level convenience wrapper around ``DatabaseManager.from_config``."""
    return DatabaseManager.from_config(config_path, config)
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"""
|
|
2
|
+
数据库迁移脚本 - 创建 feedback 表
|
|
3
|
+
|
|
4
|
+
使用方法:
|
|
5
|
+
1. 确保你的 service.yaml 中配置了数据库连接
|
|
6
|
+
2. 运行此脚本: python -m src.service_forge.db.migrations.feedback_migration
|
|
7
|
+
|
|
8
|
+
此脚本会:
|
|
9
|
+
- 检查 feedback 表是否存在
|
|
10
|
+
- 如果不存在,创建 feedback 表及索引
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import asyncio
|
|
14
|
+
import sys
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
# 添加项目根目录到 Python 路径
|
|
18
|
+
project_root = Path(__file__).parent.parent.parent.parent
|
|
19
|
+
sys.path.insert(0, str(project_root))
|
|
20
|
+
|
|
21
|
+
from loguru import logger
|
|
22
|
+
from sqlalchemy import text
|
|
23
|
+
from src.service_forge.db.database import DatabaseManager
|
|
24
|
+
from src.service_forge.db.models.feedback import Base, FeedbackBase
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
async def create_database_if_not_exists(db) -> bool:
    """Ensure the PostgreSQL database for *db* exists, creating it if needed.

    First tries to initialize *db*'s engine; any failure is treated as "the
    database does not exist" and a CREATE DATABASE is attempted through the
    server's default 'postgres' database.

    Returns:
        True when the database exists or was created, False on failure.
    """
    import asyncpg

    try:
        # Probe the target database first.
        await db.init()
        logger.info(f"✓ 数据库 '{db.postgres_db}' 已存在")
        await db.close()
        return True
    except Exception:
        logger.info(f"数据库 '{db.postgres_db}' 不存在,准备创建...")

    try:
        # Connect to the server's default 'postgres' database.
        conn = await asyncpg.connect(
            host=db.postgres_host,
            port=db.postgres_port,
            user=db.postgres_user,
            password=db.postgres_password,
            database='postgres'
        )
        try:
            # CREATE DATABASE cannot be parameterized; quote the identifier so
            # reserved words / mixed-case names work. The name still comes from
            # trusted local configuration, not user input.
            await conn.execute(f'CREATE DATABASE "{db.postgres_db}"')
        finally:
            # Bug fix: close the connection even when execute fails.
            await conn.close()

        logger.info(f"✓ 数据库 '{db.postgres_db}' 创建成功!")
        return True

    except Exception as create_error:
        logger.error(f"创建数据库失败: {create_error}")
        return False
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
async def create_feedback_table(database_manager: DatabaseManager) -> bool:
    """Create the ``feedback`` table in the default PostgreSQL database.

    Ensures the database exists, checks ``information_schema`` for the table,
    and creates all mapped tables when it is missing.

    Returns:
        True on success (including "table already exists"), False on failure.
    """
    db = database_manager.get_default_postgres_database()

    if db is None:
        logger.error("未找到默认 PostgreSQL 数据库配置")
        return False

    try:
        # 1. Make sure the database itself exists.
        db_created = await create_database_if_not_exists(db)
        if not db_created:
            return False

        # 2. Connect to the database.
        await db.init()
        engine = db.engine

        logger.info(f"连接到数据库: {db.database_url}")

        try:
            # 3. Check whether the table already exists.
            async with engine.begin() as conn:
                result = await conn.execute(
                    text(
                        "SELECT EXISTS (SELECT FROM information_schema.tables "
                        "WHERE table_schema = 'public' AND table_name = 'feedback');"
                    )
                )
                table_exists = result.scalar()

                if table_exists:
                    logger.info("✓ feedback 表已存在,跳过创建")
                    return True

                logger.info("创建 feedback 表...")

                # 4. Create all mapped tables (only feedback here).
                await conn.run_sync(Base.metadata.create_all)

            logger.info("✓ feedback 表创建成功!")
            logger.info("表结构:")
            logger.info(" - feedback_id: UUID (主键)")
            logger.info(" - task_id: VARCHAR(255) (索引)")
            logger.info(" - workflow_name: VARCHAR(255) (索引)")
            logger.info(" - rating: INTEGER (可选, 1-5)")
            logger.info(" - comment: TEXT (可选)")
            logger.info(" - metadata: JSONB (可选)")
            logger.info(" - created_at: TIMESTAMP (索引)")
            logger.info(" - updated_at: TIMESTAMP (可选)")
            return True
        finally:
            # Bug fix: the original leaked the engine on the early
            # "table already exists" return; always dispose it here.
            await db.close()

    except Exception as e:
        logger.error(f"创建 feedback 表失败: {e}")
        return False
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
async def main() -> None:
    """Entry point: read the config path from argv and run the migration."""
    logger.info("=== Feedback 表迁移脚本 ===")

    # The service config path is required as the first CLI argument;
    # print usage and bail out when it is missing.
    if len(sys.argv) <= 1:
        logger.info("请提供 service 配置文件路径:")
        logger.info(" python -m src.service_forge.db.migrations.feedback_migration <config_path>")
        logger.info("例如:")
        logger.info(" python -m src.service_forge.db.migrations.feedback_migration configs/service/my_service.yaml")
        return
    config_path = sys.argv[1]

    logger.info(f"读取配置文件: {config_path}")

    try:
        manager = DatabaseManager.from_config(config_path=config_path)
        if await create_feedback_table(manager):
            logger.info("✓ 迁移完成!")
        else:
            logger.error("✗ 迁移失败")
            sys.exit(1)
    except Exception as e:
        # SystemExit from the failure branch is not an Exception, so it
        # propagates past this handler just like in the original flow.
        logger.error(f"迁移过程出错: {e}")
        sys.exit(1)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
if __name__ == "__main__":
    # Script entry point: run the async migration driver.
    asyncio.run(main())
|
|
File without changes
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from sqlalchemy import Column, String, Integer, DateTime, Text
|
|
3
|
+
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
|
4
|
+
from sqlalchemy.ext.declarative import declarative_base
|
|
5
|
+
import uuid
|
|
6
|
+
|
|
7
|
+
Base = declarative_base()
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class FeedbackBase(Base):
    """ORM model for the ``feedback`` table (user feedback on workflow runs)."""

    __tablename__ = "feedback"

    # Surrogate primary key, generated per row.
    feedback_id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    task_id = Column(String(255), nullable=False, index=True)
    workflow_name = Column(String(255), nullable=False, index=True)
    # Optional rating; presumably 1-5, enforced upstream — TODO confirm.
    rating = Column(Integer, nullable=True)
    comment = Column(Text, nullable=True)
    # Attribute is named extra_metadata because "metadata" is reserved on the
    # declarative base; the column name in the DB stays "metadata".
    # Bug fix: use the ``dict`` factory instead of a shared ``{}`` literal so
    # rows do not all reference one mutable default dict.
    extra_metadata = Column("metadata", JSONB, nullable=True, default=dict)
    created_at = Column(DateTime, nullable=False, default=datetime.now, index=True)
    updated_at = Column(DateTime, nullable=True, onupdate=datetime.now)

    def to_dict(self) -> dict:
        """Return a JSON-friendly dict representation of this row."""
        return {
            "feedback_id": str(self.feedback_id),
            "task_id": self.task_id,
            "workflow_name": self.workflow_name,
            "rating": self.rating,
            "comment": self.comment,
            "metadata": self.extra_metadata or {},
            "created_at": self.created_at,
        }
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from .llm import LLM
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Iterator
|
|
5
|
+
|
|
6
|
+
_llm_dicts = {}
|
|
7
|
+
|
|
8
|
+
class Model(Enum):
    """Supported chat models; the enum value is the provider-side model id."""

    GPT_4_1_NANO = "gpt-4.1-nano"
    QWEN_TURBO_LATEST = "qwen-turbo-latest"
    QWEN_PLUS_LATEST = "qwen-plus-latest"
    QWEN_MAX_LATEST = "qwen-max-latest"
    DOUBO_SEED_1_6_250615 = "doubao-seed-1-6-250615"
    DOUBO_SEED_1_6_THINKING_250615 = "doubao-seed-1-6-thinking-250615"
    DOUBO_SEED_1_6_FLASH_250615 = "doubao-seed-1-6-flash-250615"
    DEEPSEEK_V3_250324 = "deepseek-v3-250324"
    AZURE_GPT_4O_MINI = "azure-gpt-4o-mini"
    GEMINI = "gemini-2.5-flash"

    def provider(self) -> str:
        """Map this model id to the provider key used by ``get_llm``."""
        prefixes = (
            ("gpt", "openai"),
            ("qwen", "dashscope"),
            ("doubao", "doubao"),
            ("deepseek", "deepseek"),
            ("azure", "azure"),
            ("gemini", "gemini"),
        )
        for prefix, provider in prefixes:
            if self.value.startswith(prefix):
                return provider
        raise ValueError(f"Invalid model: {self.value}")


def get_model(model: str) -> Model:
    """Resolve *model* given as a member name ("GPT_4_1_NANO"), a model id
    ("gpt-4.1-nano"), or a loosely-cased variant of either.

    Bug fix: ids containing dots (e.g. "gpt-4.1-nano") previously failed
    because only "-" was normalized to "_"; the enum value lookup is now
    tried first and "." is normalized too.

    Raises:
        ValueError: when no member matches.
    """
    if model in Model.__members__:
        return Model[model]

    # Exact match on the model id (enum value).
    try:
        return Model(model)
    except ValueError:
        pass

    normalized = model.upper().replace("-", "_").replace(".", "_")
    if normalized in Model.__members__:
        return Model[normalized]

    raise ValueError(f"Invalid model: {model}")
|
|
44
|
+
|
|
45
|
+
def get_llm(provider: str) -> LLM:
    """Return (and lazily cache in ``_llm_dicts``) the LLM client for *provider*.

    Credentials come from ``<PREFIX>_API_KEY`` / ``<PREFIX>_BASE_URL`` /
    ``<PREFIX>_TIMEOUT`` environment variables; Azure additionally reads
    ``AZURE_API_VERSION``.

    Raises:
        ValueError: for an unknown provider key.
    """
    if provider in _llm_dicts:
        return _llm_dicts[provider]

    # One table replaces the six copy-pasted branches of the original.
    env_prefixes = {
        "openai": "OPENAI",
        "doubao": "DOUBAO",
        "dashscope": "DASHSCOPE",
        "deepseek": "DEEPSEEK",
        "azure": "AZURE",
        "gemini": "GEMINI",
    }
    prefix = env_prefixes.get(provider)
    if prefix is None:
        raise ValueError(f"Invalid provider: {provider}")

    api_key = os.environ.get(f"{prefix}_API_KEY", "")
    base_url = os.environ.get(f"{prefix}_BASE_URL", "")
    timeout = int(os.environ.get(f"{prefix}_TIMEOUT", 2000))

    if provider == "azure":
        # Azure's OpenAI-compatible endpoint also requires an API version.
        _llm_dicts[provider] = LLM(api_key, base_url, timeout, os.environ.get("AZURE_API_VERSION", ""))
    else:
        _llm_dicts[provider] = LLM(api_key, base_url, timeout)
    return _llm_dicts[provider]
|
|
62
|
+
|
|
63
|
+
def chat(input: str, system_prompt: str, model: Model, temperature: float) -> str:
    """Send a single-turn chat request and return the full response text."""
    llm = get_llm(model.provider())
    return llm.chat(input, system_prompt, model.value, temperature)
|
|
65
|
+
|
|
66
|
+
def chat_stream(input: str, system_prompt: str, model: Model, temperature: float) -> Iterator[str]:
    """Stream a single-turn chat response chunk by chunk."""
    llm = get_llm(model.provider())
    return llm.chat_stream(input, system_prompt, model.value, temperature)
|
service_forge/llm/llm.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import random
|
|
2
|
+
from openai import OpenAI
|
|
3
|
+
from openai import AzureOpenAI
|
|
4
|
+
from typing import Iterator
|
|
5
|
+
|
|
6
|
+
class LLM():
|
|
7
|
+
def __init__(self, api_key: str, base_url: str, timeout: int, api_version: str | None = None):
|
|
8
|
+
if api_version is not None:
|
|
9
|
+
self.client = AzureOpenAI(
|
|
10
|
+
api_key=api_key,
|
|
11
|
+
azure_endpoint=base_url,
|
|
12
|
+
timeout=timeout,
|
|
13
|
+
api_version=api_version,
|
|
14
|
+
)
|
|
15
|
+
else:
|
|
16
|
+
self.client = OpenAI(
|
|
17
|
+
api_key=api_key,
|
|
18
|
+
base_url=base_url,
|
|
19
|
+
timeout=timeout,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
def chat(self, input: str, system_prompt: str, model: str, temperature: float) -> str:
|
|
23
|
+
if model.startswith("azure"):
|
|
24
|
+
model = model.replace("azure-", "")
|
|
25
|
+
|
|
26
|
+
response = self.client.chat.completions.create(
|
|
27
|
+
model=model,
|
|
28
|
+
messages=[
|
|
29
|
+
{"role": "system", "content": system_prompt},
|
|
30
|
+
{"role": "user", "content": input},
|
|
31
|
+
],
|
|
32
|
+
temperature=temperature,
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
if response.choices[0].message.content is None:
|
|
36
|
+
return "Error"
|
|
37
|
+
else:
|
|
38
|
+
return response.choices[0].message.content
|
|
39
|
+
|
|
40
|
+
def chat_stream(self, input: str, system_prompt: str, model: str, temperature: float) -> Iterator[str]:
|
|
41
|
+
if model.startswith("azure"):
|
|
42
|
+
model = model.replace("azure-", "")
|
|
43
|
+
|
|
44
|
+
stream = self.client.chat.completions.create(
|
|
45
|
+
model=model,
|
|
46
|
+
messages=[
|
|
47
|
+
{"role": "system", "content": system_prompt},
|
|
48
|
+
{"role": "user", "content": input},
|
|
49
|
+
],
|
|
50
|
+
temperature=temperature,
|
|
51
|
+
stream=True,
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
for chunk in stream:
|
|
55
|
+
if chunk.choices[0].delta.content is not None:
|
|
56
|
+
yield chunk.choices[0].delta.content
|
|
File without changes
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field
|
|
2
|
+
from typing import Optional, Any
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class FeedbackCreate(BaseModel):
    """Request model for creating a feedback entry."""
    task_id: str = Field(..., description="工作流任务ID")
    workflow_name: str = Field(..., description="工作流名称")
    rating: Optional[int] = Field(None, ge=1, le=5, description="评分 (1-5)")
    comment: Optional[str] = Field(None, description="用户评论")
    metadata: Optional[dict[str, Any]] = Field(default_factory=dict, description="额外的元数据")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class FeedbackResponse(BaseModel):
    """Response model for a single feedback entry."""
    feedback_id: str = Field(..., description="反馈ID")
    task_id: str = Field(..., description="工作流任务ID")
    workflow_name: str = Field(..., description="工作流名称")
    rating: Optional[int] = Field(None, description="评分")
    comment: Optional[str] = Field(None, description="用户评论")
    metadata: dict[str, Any] = Field(default_factory=dict, description="元数据")
    created_at: datetime = Field(..., description="创建时间")
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class FeedbackListResponse(BaseModel):
    """Response model for a paginated list of feedback entries."""
    total: int = Field(..., description="总数")
    feedbacks: list[FeedbackResponse] = Field(..., description="反馈列表")
|