service-forge 0.1.18__py3-none-any.whl → 0.1.39__py3-none-any.whl

This diff shows the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.

Potentially problematic release: this version of service-forge might be problematic.

Files changed (80)
  1. service_forge/__init__.py +0 -0
  2. service_forge/api/deprecated_websocket_api.py +91 -33
  3. service_forge/api/deprecated_websocket_manager.py +70 -53
  4. service_forge/api/http_api.py +205 -55
  5. service_forge/api/kafka_api.py +113 -25
  6. service_forge/api/routers/meta_api/meta_api_router.py +57 -0
  7. service_forge/api/routers/service/service_router.py +42 -6
  8. service_forge/api/routers/trace/trace_router.py +326 -0
  9. service_forge/api/routers/websocket/websocket_router.py +69 -1
  10. service_forge/api/service_studio.py +9 -0
  11. service_forge/db/database.py +17 -0
  12. service_forge/execution_context.py +106 -0
  13. service_forge/frontend/static/assets/CreateNewNodeDialog-DkrEMxSH.js +1 -0
  14. service_forge/frontend/static/assets/CreateNewNodeDialog-DwFcBiGp.css +1 -0
  15. service_forge/frontend/static/assets/EditorSidePanel-BNVms9Fq.css +1 -0
  16. service_forge/frontend/static/assets/EditorSidePanel-DZbB3ILL.js +1 -0
  17. service_forge/frontend/static/assets/FeedbackPanel-CC8HX7Yo.js +1 -0
  18. service_forge/frontend/static/assets/FeedbackPanel-ClgniIVk.css +1 -0
  19. service_forge/frontend/static/assets/FormattedCodeViewer.vue_vue_type_script_setup_true_lang-BNuI1NCs.js +1 -0
  20. service_forge/frontend/static/assets/NodeDetailWrapper-BqFFM7-r.js +1 -0
  21. service_forge/frontend/static/assets/NodeDetailWrapper-pZBxv3J0.css +1 -0
  22. service_forge/frontend/static/assets/TestRunningDialog-D0GrCoYs.js +1 -0
  23. service_forge/frontend/static/assets/TestRunningDialog-dhXOsPgH.css +1 -0
  24. service_forge/frontend/static/assets/TracePanelWrapper-B9zvDSc_.js +1 -0
  25. service_forge/frontend/static/assets/TracePanelWrapper-BiednCrq.css +1 -0
  26. service_forge/frontend/static/assets/WorkflowEditor-CcaGGbko.js +3 -0
  27. service_forge/frontend/static/assets/WorkflowEditor-CmasOOYK.css +1 -0
  28. service_forge/frontend/static/assets/WorkflowList-Copuwi-a.css +1 -0
  29. service_forge/frontend/static/assets/WorkflowList-LrRJ7B7h.js +1 -0
  30. service_forge/frontend/static/assets/WorkflowStudio-CthjgII2.css +1 -0
  31. service_forge/frontend/static/assets/WorkflowStudio-FCyhGD4y.js +2 -0
  32. service_forge/frontend/static/assets/api-BDer3rj7.css +1 -0
  33. service_forge/frontend/static/assets/api-DyiqpKJK.js +1 -0
  34. service_forge/frontend/static/assets/code-editor-DBSql_sc.js +12 -0
  35. service_forge/frontend/static/assets/el-collapse-item-D4LG0FJ0.css +1 -0
  36. service_forge/frontend/static/assets/el-empty-D4ZqTl4F.css +1 -0
  37. service_forge/frontend/static/assets/el-form-item-BWkJzdQ_.css +1 -0
  38. service_forge/frontend/static/assets/el-input-D6B3r8CH.css +1 -0
  39. service_forge/frontend/static/assets/el-select-B0XIb2QK.css +1 -0
  40. service_forge/frontend/static/assets/el-tag-DljBBxJR.css +1 -0
  41. service_forge/frontend/static/assets/element-ui-D3x2y3TA.js +12 -0
  42. service_forge/frontend/static/assets/elkjs-Dm5QV7uy.js +24 -0
  43. service_forge/frontend/static/assets/highlightjs-D4ATuRwX.js +3 -0
  44. service_forge/frontend/static/assets/index-BMvodlwc.js +2 -0
  45. service_forge/frontend/static/assets/index-CjSe8i2q.css +1 -0
  46. service_forge/frontend/static/assets/js-yaml-yTPt38rv.js +32 -0
  47. service_forge/frontend/static/assets/time-DKCKV6Ug.js +1 -0
  48. service_forge/frontend/static/assets/ui-components-DQ7-U3pr.js +1 -0
  49. service_forge/frontend/static/assets/vue-core-DL-LgTX0.js +1 -0
  50. service_forge/frontend/static/assets/vue-flow-Dn7R8GPr.js +39 -0
  51. service_forge/frontend/static/index.html +16 -0
  52. service_forge/frontend/static/vite.svg +1 -0
  53. service_forge/model/meta_api/__init__.py +0 -0
  54. service_forge/model/meta_api/schema.py +29 -0
  55. service_forge/model/trace.py +82 -0
  56. service_forge/service.py +39 -11
  57. service_forge/service_config.py +14 -0
  58. service_forge/sft/cli.py +39 -0
  59. service_forge/sft/cmd/remote_deploy.py +160 -0
  60. service_forge/sft/cmd/remote_list_tars.py +111 -0
  61. service_forge/sft/config/injector.py +54 -7
  62. service_forge/sft/config/injector_default_files.py +13 -1
  63. service_forge/sft/config/sf_metadata.py +31 -27
  64. service_forge/sft/config/sft_config.py +18 -0
  65. service_forge/sft/util/assert_util.py +0 -1
  66. service_forge/telemetry.py +66 -0
  67. service_forge/utils/default_type_converter.py +1 -1
  68. service_forge/utils/type_converter.py +5 -0
  69. service_forge/utils/workflow_clone.py +1 -0
  70. service_forge/workflow/node.py +274 -27
  71. service_forge/workflow/triggers/fast_api_trigger.py +64 -28
  72. service_forge/workflow/triggers/websocket_api_trigger.py +66 -38
  73. service_forge/workflow/workflow.py +140 -37
  74. service_forge/workflow/workflow_callback.py +27 -4
  75. service_forge/workflow/workflow_factory.py +14 -0
  76. {service_forge-0.1.18.dist-info → service_forge-0.1.39.dist-info}/METADATA +4 -1
  77. service_forge-0.1.39.dist-info/RECORD +134 -0
  78. service_forge-0.1.18.dist-info/RECORD +0 -83
  79. {service_forge-0.1.18.dist-info → service_forge-0.1.39.dist-info}/WHEEL +0 -0
  80. {service_forge-0.1.18.dist-info → service_forge-0.1.39.dist-info}/entry_points.txt +0 -0
service_forge/api/routers/service/service_router.py

@@ -1,10 +1,12 @@
+import json
 import os
 import uuid
 import tempfile
 from fastapi import APIRouter, HTTPException, UploadFile, File, Form
+from fastapi.params import Body, Path
 from fastapi.responses import JSONResponse
 from loguru import logger
-from typing import Optional, TYPE_CHECKING
+from typing import Optional, TYPE_CHECKING, Dict, Any
 from pydantic import BaseModel
 from omegaconf import OmegaConf
 from service_forge.current_service import get_service
@@ -18,18 +20,32 @@ class WorkflowStatusResponse(BaseModel):
     workflows: list[dict]
 
 class WorkflowActionResponse(BaseModel):
-    workflow_id: str
+    workflow_id: Optional[str] = None
     success: bool
     message: str
+    task_id: Optional[str] = None
 
 @service_router.get("/status", response_model=WorkflowStatusResponse)
 async def get_service_status():
     service = get_service()
     if service is None:
         raise HTTPException(status_code=503, detail="Service not initialized")
-
+    # Exclude debug versions
+    try:
+        status = service.get_service_status(exclude_debug=True)
+        return status
+    except Exception as e:
+        logger.error(f"Error getting service status: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+@service_router.get("/workflow/{workflow_id}/status", response_model=dict)
+def get_workflow_data(workflow_id: str):
+    service = get_service()
+    if service is None:
+        raise HTTPException(status_code=503, detail="Service not initialized")
+
     try:
-        status = service.get_service_status()
+        status = service.get_workflow_status(workflow_id)
         return status
     except Exception as e:
         logger.error(f"Error getting service status: {e}")
@@ -42,7 +58,7 @@ async def start_workflow(workflow_id: str):
         raise HTTPException(status_code=503, detail="Service not initialized")
 
     try:
-        success = service.start_workflow_by_id(uuid.UUID(workflow_id))
+        success = service.start_workflow_by_id(workflow_id)
         if success:
             return WorkflowActionResponse(success=True, message=f"Workflow {workflow_id} started successfully")
         else:
@@ -58,7 +74,7 @@ async def stop_workflow(workflow_id: str):
         raise HTTPException(status_code=503, detail="Service not initialized")
 
     try:
-        success = await service.stop_workflow_by_id(uuid.UUID(workflow_id))
+        success = await service.stop_workflow_by_id(workflow_id)
         if success:
             return WorkflowActionResponse(success=True, message=f"Workflow {workflow_id} stopped successfully")
         else:
@@ -67,6 +83,26 @@ async def stop_workflow(workflow_id: str):
         logger.error(f"Error stopping workflow {workflow_id}: {e}")
         raise HTTPException(status_code=500, detail=str(e))
 
+class TriggerWorkflowRequest(BaseModel):
+    kwargs: Optional[Dict[str, Any]] = {}
+
+@service_router.post("/workflow/{workflow_id}/trigger", response_model=WorkflowActionResponse)
+async def trigger_workflow(workflow_id: str = Path(...), request_body: TriggerWorkflowRequest = Body(...)):
+    service = get_service()
+    if service is None:
+        raise HTTPException(status_code=503, detail="Service not initialized")
+
+    try:
+        task_id = service.trigger_workflow_by_id(workflow_id, "", None, **request_body.kwargs)  # the trigger name is not actually used
+        if task_id is not None:
+            return WorkflowActionResponse(workflow_id=workflow_id, task_id=str(task_id), success=True, message=f"Workflow {workflow_id} triggered successfully with task_id {task_id}")
+        else:
+            return WorkflowActionResponse(workflow_id=workflow_id, success=False, message=f"Failed to trigger workflow {workflow_id}")
+    except Exception as e:
+        logger.error(f"Error triggering workflow {workflow_id}: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
 @service_router.post("/workflow/upload", response_model=WorkflowActionResponse)
 async def upload_workflow_config(
     file: Optional[UploadFile] = File(None),
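
The new trigger endpoint wraps service.trigger_workflow_by_id and returns the task_id in the WorkflowActionResponse. A minimal client sketch, assuming the router is reachable at http://localhost:8000 under the paths shown above (base URL and mount prefix are assumptions, not part of the diff):

# Hypothetical client for POST /workflow/{workflow_id}/trigger; base URL is an assumption.
import httpx

def trigger_workflow(workflow_id: str, **kwargs) -> str | None:
    response = httpx.post(
        f"http://localhost:8000/workflow/{workflow_id}/trigger",
        json={"kwargs": kwargs},  # matches the TriggerWorkflowRequest body
        timeout=10.0,
    )
    response.raise_for_status()
    body = response.json()  # WorkflowActionResponse fields
    return body.get("task_id") if body.get("success") else None

# e.g. task_id = trigger_workflow("<workflow-id>", user_id="42", prompt="hello")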
service_forge/api/routers/trace/trace_router.py

@@ -0,0 +1,326 @@
+from datetime import datetime, timezone
+
+from fastapi import APIRouter, HTTPException
+from httpx import AsyncClient, HTTPStatusError
+from loguru import logger
+
+from service_forge.current_service import get_service
+from service_forge.model.trace import (
+    GetTraceListParams, GetTraceListResponse,
+    GetTraceDetailParams, GetTraceDetailResponse, TraceListItem, TraceDetail, Span, StatusCode, SpanKind
+)
+
+trace_router = APIRouter(prefix="/sdk/trace", tags=["trace"])
+
+
+def _get_signoz_api_base():
+    _service = get_service()
+    if not _service:
+        return None
+    if not _service.config.signoz:
+        return None
+    return _service.config.signoz.api_url
+
+def _get_signoz_api_key():
+    _service = get_service()
+    if not _service:
+        return None
+    if not _service.config.signoz:
+        return None
+    return _service.config.signoz.api_key
+
+@trace_router.post("/list", response_model=GetTraceListResponse)
+async def get_trace_list(params: GetTraceListParams):
+    """
+    Fetch the trace list by calling the external SigNoz API.
+    """
+    _service = get_service()
+    internal_service_name = _service.name if _service else ""
+
+    # Build the SQL query
+    where_clauses = []
+
+    # Filter by service name
+    if params.service_name:
+        where_clauses.append(f"resource_string_service$$name = '{params.service_name}'")
+    elif internal_service_name:
+        where_clauses.append(f"resource_string_service$$name = '{internal_service_name}'")
+
+    # Filter by workflow name
+    if params.workflow_name:
+        where_clauses.append(f"attributes_string['workflow.name'] = '{params.workflow_name}'")
+
+    # Filter by workflow task ID
+    if params.workflow_task_id:
+        where_clauses.append(f"attributes_string['workflow.task_id'] = '{params.workflow_task_id}'")
+
+    # Filter by error status
+    if params.has_error is not None:
+        where_clauses.append(f"has_error = {1 if params.has_error else 0}")
+
+    # Combine the WHERE clause
+    where_part = ""
+    if where_clauses:
+        where_part = "WHERE " + " AND ".join(where_clauses)
+
+    # Build the full query
+    # Note: group rows by trace_id and compute the span_count for each trace,
+    # while taking the latest span information (max timestamp)
+    query = f"""
+    SELECT
+        trace_id,
+        anyIf(resource_string_service$$name, resource_string_service$$name != '' AND resource_string_service$$name IS NOT NULL) as service_name,
+        anyIf(attributes_string['workflow.name'], attributes_string['workflow.name'] != '' AND attributes_string['workflow.name'] IS NOT NULL) as workflow_name,
+        anyIf(attributes_string['workflow.task_id'], attributes_string['workflow.task_id'] != '' AND attributes_string['workflow.task_id'] IS NOT NULL) as workflow_task_id,
+        max(timestamp) as timestamp,
+        sum(duration_nano) as duration_nano,
+        count() as span_count,
+        max(has_error) as has_error,
+        max(status_code) as status_code
+    FROM signoz_traces.distributed_signoz_index_v3
+    {where_part}
+    GROUP BY trace_id
+    ORDER BY timestamp DESC
+    LIMIT {params.limit or 100}
+    OFFSET {params.offset or 0}
+    """
+
+    # Also compute the total count
+    count_query = f"""
+    SELECT COUNT(DISTINCT trace_id) as total
+    FROM signoz_traces.distributed_signoz_index_v3
+    {where_part}
+    """
+
+    async with AsyncClient() as client:
+        try:
+            current_time = datetime.now(timezone.utc)
+            # Call the external API
+            response = await client.post(
+                f"{_get_signoz_api_base()}/api/v5/query_range",
+                headers={
+                    "SIGNOZ-API-KEY": _get_signoz_api_key()
+                },
+                json={
+                    "start": int(params.start_time or 0),
+                    "end": int(params.end_time or (current_time.timestamp() * 1000)),
+                    "requestType": "raw",
+                    "compositeQuery": {
+                        "queries": [
+                            {
+                                "type": "clickhouse_sql",
+                                "spec": {
+                                    "name": "query_1",
+                                    "query": query,
+                                    "disabled": False
+                                }
+                            },
+                            {
+                                "type": "clickhouse_sql",
+                                "spec": {
+                                    "name": "query_2",
+                                    "query": count_query,
+                                    "disabled": False
+                                }
+                            }
+                        ]
+                    }
+
+                }
+            )
+
+            response.raise_for_status()  # Check the HTTP status code
+
+            # Assume the external API returns data in the same shape as our response model;
+            # if not, convert it here
+            data = response.json()
+
+            query_results = data["data"]["data"]["results"]
+            query_1_result = None
+            query_2_result = None
+            for result in query_results:
+                if result["queryName"] == "query_1":
+                    query_1_result = result
+                elif result["queryName"] == "query_2":
+                    query_2_result = result
+
+            traces = [
+                TraceListItem(
+                    trace_id=item["data"].get("trace_id", ""),
+                    service_name=item["data"].get("service_name", params.service_name or internal_service_name),
+                    workflow_name=item["data"].get("workflow_name", params.workflow_name or ""),
+                    workflow_task_id=item["data"].get("workflow_task_id", params.workflow_task_id or ""),
+                    timestamp=item["data"].get("timestamp", current_time.isoformat().replace("+00:00", "Z")),
+                    duration_nano=item["data"].get("duration_nano", 0),
+                    span_count=item["data"].get("span_count", 0),
+                    has_error=item["data"].get("has_error", False),
+                    status_code=item["data"].get("status_code", StatusCode.OK),
+                )
+                for item in query_1_result["rows"] or []
+            ]
+
+            # Convert to the internal response model and return
+            return GetTraceListResponse(
+                traces=traces,
+                total=query_2_result["rows"][0]["data"].get("total", len(traces)),
+                limit=params.limit or 100,
+                offset=params.offset or 0,
+            )
+
+        except HTTPStatusError as e:
+            # Handle HTTP errors
+            raise HTTPException(status_code=e.response.status_code, detail=str(e))
+        except Exception as e:
+            # Handle other errors
+            logger.error(e)
+            raise HTTPException(status_code=500, detail=f"API处理失败: {str(e)}")
+
+
+@trace_router.post("/detail", response_model=GetTraceDetailResponse)
+async def get_trace_detail(params: GetTraceDetailParams):
+    # Build the SQL query
+    where_clauses = [f"trace_id = '{params.trace_id}'"]
+
+    # Filter by service name
+    if params.service_name:
+        where_clauses.append(f"resource_string_service$$name = '{params.service_name}'")
+
+    # Combine the WHERE clause
+    where_part = "WHERE " + " AND ".join(where_clauses)
+
+    # Build the full query that fetches all span information
+    span_query = f"""
+    SELECT
+        trace_id,
+        span_id,
+        parent_span_id,
+        name,
+        kind,
+        timestamp,
+        duration_nano,
+        status_code,
+        has_error,
+        resource_string_service$$name as service_name,
+        attributes_string as attributes_string
+    FROM signoz_traces.distributed_signoz_index_v3
+    {where_part}
+    ORDER BY timestamp ASC
+    """
+
+    count_query = f"""
+    SELECT
+        count(DISTINCT span_id) as total
+    FROM signoz_traces.distributed_signoz_index_v3
+    {where_part}
+    """
+
+    async with AsyncClient() as client:
+        try:
+            current_time = datetime.now(timezone.utc)
+            # Call the external API to fetch span data
+            response = await client.post(
+                f"{_get_signoz_api_base()}/api/v5/query_range",
+                headers={
+                    "SIGNOZ-API-KEY": _get_signoz_api_key()
+                },
+                json={
+                    "start": 0,  # Fetch the full trace without a time-range limit
+                    "end": int(current_time.timestamp() * 1000),
+                    "requestType": "raw",
+                    "compositeQuery": {
+                        "queries": [
+                            {
+                                "type": "clickhouse_sql",
+                                "spec": {
+                                    "name": "query_1",
+                                    "query": span_query,
+                                    "disabled": False
+                                }
+                            },
+                            {
+                                "type": "clickhouse_sql",
+                                "spec": {
+                                    "name": "query_2",
+                                    "query": count_query,
+                                    "disabled": False
+                                }
+                            },
+                        ]
+                    }
+                }
+            )
+            response.raise_for_status()  # Check the HTTP status code
+
+            # Parse the response data
+            data = response.json()
+
+            query_results = data["data"]["data"]["results"]
+            query_1_result = None
+            query_2_result = None
+            for result in query_results:
+                if result["queryName"] == "query_1":
+                    query_1_result = result
+                elif result["queryName"] == "query_2":
+                    query_2_result = result
+
+            _service = get_service()
+            internal_service_name = _service.name if _service else ""
+
+            spans = [
+                Span(
+                    span_id=item["data"].get("span_id", ""),
+                    parent_span_id=item["data"].get("parent_span_id", ""),
+                    name=item["data"].get("name", ""),
+                    kind=item["data"].get("kind", SpanKind.UNKNOWN),
+                    timestamp=item["data"].get("timestamp", current_time.isoformat().replace("+00:00", "Z")),
+                    duration_nano=item["data"].get("duration_nano", 0),
+                    status_code=item["data"].get("status_code", StatusCode.OK),
+                    service_name=item["data"].get("attributes_string", {}).get("service.name", params.service_name or internal_service_name),
+                    workflow_name=item["data"].get("attributes_string", {}).get("workflow.name", ""),
+                    workflow_task_id=item["data"].get("attributes_string", {}).get("workflow.task_id", ""),
+                    node_name=item["data"].get("attributes_string", {}).get("node.name", ""),
+                    attributes=item["data"].get("attributes_string", {}),
+                )
+                for item in query_1_result["rows"] or []
+            ]
+
+            has_error = False
+            for row in (query_1_result["rows"] or []):
+                if row["data"].get("has_error", False):
+                    has_error = True
+                    break
+
+            workflow_name = ""
+            for span in spans:
+                if span.workflow_name:
+                    workflow_name = span.workflow_name
+                    break
+
+            workflow_task_id = ""
+            for span in spans:
+                if span.workflow_task_id:
+                    workflow_task_id = span.workflow_task_id
+                    break
+
+            return GetTraceDetailResponse(
+                trace=TraceDetail(
+                    trace_id=params.trace_id,
+                    service_name=params.service_name or "",  # Inject the current service name
+                    spans=spans,
+                    start_time=spans[0].timestamp if spans else None,
+                    end_time=spans[-1].timestamp if spans else None,
+                    span_count=query_2_result["rows"][0]["data"].get("total", len(spans)),
+                    has_error=has_error,
+                    workflow_name=workflow_name,
+                    workflow_task_id=workflow_task_id,
+                )
+            )
+
+
+
+        except HTTPStatusError as e:
+            # Handle HTTP errors
+            raise HTTPException(status_code=e.response.status_code, detail=str(e))
+        except Exception as e:
+            # Handle other errors
+            raise HTTPException(status_code=500, detail=f"API处理失败: {str(e)}")
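
Both trace endpoints proxy ClickHouse SQL through SigNoz's /api/v5/query_range API, so they only work when config.signoz.api_url and config.signoz.api_key are set. A sketch of calling the list endpoint, assuming a local deployment and that the GetTraceListParams fields used in the hunk are optional (base URL and field optionality are assumptions):

# Hypothetical call to POST /sdk/trace/list; base URL and parameter values are illustrative.
import asyncio
import httpx

async def list_recent_traces(workflow_name: str) -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post(
            "/sdk/trace/list",
            json={
                "workflow_name": workflow_name,  # GetTraceListParams field
                "has_error": False,
                "limit": 20,
                "offset": 0,
            },
        )
        resp.raise_for_status()
        return resp.json()  # GetTraceListResponse: traces, total, limit, offset

# asyncio.run(list_recent_traces("my_workflow"))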
service_forge/api/routers/websocket/websocket_router.py

@@ -1,14 +1,33 @@
-from fastapi import WebSocket, WebSocketDisconnect
+from http.client import HTTPException
+
+from fastapi import WebSocket, WebSocketDisconnect, Form
 from fastapi.routing import APIRouter
 from loguru import logger
 import json
 import uuid
+
+from omegaconf import OmegaConf
+
+from service_forge.current_service import get_service
 from .websocket_manager import websocket_manager
 
 websocket_router = APIRouter()
 
 @websocket_router.websocket("/sdk/ws")
 async def sdk_websocket_endpoint(websocket: WebSocket):
+    # Authenticate WebSocket connection before accepting
+    # Get trusted_domain from app.state if available
+    # trusted_domain = getattr(websocket.app.state, "trusted_domain", "ring.shiweinan.com")
+    # enable_auth = getattr(websocket.app.state, "enable_auth_middleware", True)
+
+    # if enable_auth:
+    #     from service_forge.api.http_api import authenticate_websocket
+    #     await authenticate_websocket(websocket, trusted_domain)
+    # else:
+    #     # If auth is disabled, set default values
+    #     websocket.state.user_id = websocket.headers.get("X-User-ID", "0")
+    #     websocket.state.auth_token = websocket.headers.get("X-User-Token")
+
     await websocket.accept()
     try:
         while True:
@@ -61,6 +80,10 @@ async def sdk_websocket_endpoint(websocket: WebSocket):
                 await websocket.send_text(
                     json.dumps({"error": "Invalid task_id format"})
                 )
+            # TODO: support deleting the debug version after unsubscribing in debug mode
+            # Debug-mode subscription (the client does not provide a task_id; it provides a workflow config, and the server creates the debug version, triggers it, registers the subscription, and returns the task_id and workflow_id)
+            elif message_type == "subscribe_debug":
+                await handle_subscribe_debug_message(websocket, message)
             else:
                 await websocket.send_text(
                     json.dumps({"error": f"Unknown message type: {message_type}"})
@@ -76,3 +99,48 @@ async def sdk_websocket_endpoint(websocket: WebSocket):
         logger.error(f"SDK WebSocket连接处理异常: {e}")
         await websocket_manager.disconnect(websocket)
 
+
+async def handle_subscribe_debug_message(websocket: WebSocket, message: dict):
+    # Parse the workflow config for the debug version
+    try:
+        workflow_config_str = message.get("workflow_config")
+        config = OmegaConf.to_object(OmegaConf.create(workflow_config_str))
+    except Exception as e:
+        await websocket.send_text(
+            json.dumps({"error": f"Failed to parse workflow config: {str(e)}"})
+        )
+        return
+
+    # Load the debug version of the workflow
+    try:
+        service = get_service()
+        workflow_id = await service.load_workflow_from_config(config=config, debug_version=True)
+    except Exception as e:
+        await websocket.send_text(
+            json.dumps({"error": f"Failed to load workflow of the debug version: {str(e)}"})
+        )
+        return
+
+    # Assign a task_id first and subscribe
+    task_id = uuid.uuid4()
+    try:
+        success = await websocket_manager.subscribe(websocket, task_id)
+        response = {"success": success, "type": "subscribe_response", "task_id": str(task_id), "workflow_id": str(workflow_id)}
+        await websocket.send_text(json.dumps(response))
+    except Exception as e:
+        await websocket.send_text(
+            json.dumps({"error": f"Failed to subscribe the debug task: {str(e)}"})
+        )
+        return
+
+    # Trigger the debug version of the workflow
+    try:
+        trigger_args = message.get("trigger_args", {})
+        task_id = service.trigger_workflow_by_id(str(workflow_id), "", assigned_task_id=task_id, **trigger_args)
+        response = {"success": True, "type": "trigger_response", "task_id": str(task_id), "workflow_id": str(workflow_id)}
+        await websocket.send_text(json.dumps(response))
+    except Exception as e:
+        await websocket.send_text(
+            json.dumps({"error": f"Failed to trigger workflow of the debug version: {str(e)}"})
+        )
+        return
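
A sketch of the client side of the new subscribe_debug flow, assuming the dispatch key on the message is "type" (the key name is not visible in this hunk) and using the third-party websockets package against an illustrative host:

# Hypothetical debug-mode subscription over /sdk/ws; host and message key names are assumptions.
import asyncio
import json
import websockets

async def subscribe_debug(workflow_yaml: str):
    async with websockets.connect("ws://localhost:8000/sdk/ws") as ws:
        await ws.send(json.dumps({
            "type": "subscribe_debug",         # assumed dispatch key for message_type
            "workflow_config": workflow_yaml,  # parsed with OmegaConf on the server
            "trigger_args": {},                # forwarded to trigger_workflow_by_id
        }))
        subscribe_response = json.loads(await ws.recv())  # contains task_id and workflow_id
        trigger_response = json.loads(await ws.recv())
        return subscribe_response, trigger_response

# asyncio.run(subscribe_debug(open("workflow.yaml").read()))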
service_forge/api/service_studio.py

@@ -0,0 +1,9 @@
+from pathlib import Path
+
+from starlette.staticfiles import StaticFiles
+
+def get_studio_static_files() -> StaticFiles:
+    frontend_dist_path = Path(__file__).parent.parent / "frontend" / "static"
+    return StaticFiles(directory=str(frontend_dist_path), html=True)
+
+studio_static_files = get_studio_static_files()
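
studio_static_files is a ready-to-mount StaticFiles app serving the bundled frontend under service_forge/frontend/static. One way it might be attached to a FastAPI application (the /studio mount path is an assumption):

# Hypothetical mount of the bundled studio frontend; the "/studio" path is illustrative.
from fastapi import FastAPI
from service_forge.api.service_studio import studio_static_files

app = FastAPI()
app.mount("/studio", studio_static_files, name="studio")
# Because StaticFiles is created with html=True, GET /studio/ serves index.html.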
service_forge/db/database.py

@@ -7,6 +7,7 @@ from typing import AsyncGenerator
 from loguru import logger
 from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
 from service_forge.service_config import ServiceConfig
+from pymongo import AsyncMongoClient
 
 class PostgresDatabase:
     def __init__(
@@ -114,6 +115,7 @@ class MongoDatabase:
         self.mongo_password = mongo_password
         self.mongo_db = mongo_db or ""
         self.client = pymongo.MongoClient(self.database_url)
+        self.async_client = AsyncMongoClient(self.database_url)
         self.test_connection()
 
     @property
@@ -129,6 +131,21 @@
             logger.error(f"MongoDB connection test failed for database '{self.name}': {e}")
             return False
 
+    async def test_async_connection(self) -> bool:
+        try:
+            await self.async_client.admin.command('ping')
+            logger.info(f"Async MongoDB connection test successful for database '{self.name}'")
+            return True
+        except Exception as e:
+            logger.error(f"Async MongoDB connection test failed for database '{self.name}': {e}")
+            return False
+
+    def get_sync_collection(self, collection_name: str):
+        return self.client[self.mongo_db][collection_name]
+
+    def get_async_collection(self, collection_name: str):
+        return self.async_client[self.mongo_db][collection_name]
+
 class RedisDatabase:
     def __init__(
         self,
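
MongoDatabase now holds a pymongo AsyncMongoClient next to the existing sync client. A short sketch of how the new helpers might be used from async code (the collection name and filter are illustrative):

# Hypothetical usage of the new async MongoDB helpers; names and filters are illustrative.
import asyncio

async def count_pending(db) -> int:  # db: a MongoDatabase instance
    if not await db.test_async_connection():
        raise RuntimeError("MongoDB is unreachable")
    collection = db.get_async_collection("workflow_tasks")
    # PyMongo's async API mirrors the sync one but returns awaitables.
    return await collection.count_documents({"status": "pending"})

# asyncio.run(count_pending(mongo_database))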
service_forge/execution_context.py

@@ -0,0 +1,106 @@
+from __future__ import annotations
+
+import json
+from contextlib import contextmanager
+from contextvars import ContextVar, Token
+from dataclasses import dataclass, field, replace
+from typing import Any, Iterator, Mapping
+
+from loguru import logger
+from opentelemetry.context import Context as OtelContext
+from opentelemetry.trace import Span
+
+"""
+ExecutionContext helper for tracing/logging/state propagation.
+
+Usage constraints (async-aware):
+- Entrypoints must call set_current_context/use_execution_context so spawned coroutines inherit the context; create tasks after setting it.
+- ContextVar does not flow into run_in_executor/thread pools; set_current_context manually in threads if needed.
+- Use with_state to copy + mutate state; instances are immutable to avoid concurrent writes.
+- Logger/span are shared references; set a new ExecutionContext when switching spans to prevent cross-task leakage.
+- State payload is masked for sensitive keys and capped at ~4KB after JSON serialization; oversize payload raises ValueError.
+"""
+
+STATE_SIZE_LIMIT_BYTES = 4096
+SENSITIVE_KEYS = (
+    "password",
+    "passwd",
+    "secret",
+    "token",
+    "api_key",
+    "apikey",
+    "authorization",
+    "auth",
+    "credential",
+    "cookie",
+)
+
+
+def _mask_sensitive_value(key: str, value: Any) -> Any:
+    lower_key = key.lower()
+    if any(sensitive in lower_key for sensitive in SENSITIVE_KEYS):
+        return "***"
+    return value
+
+
+def _validate_state_payload(state: Mapping[str, Any]) -> None:
+    try:
+        payload = json.dumps(state, default=str)
+    except Exception:
+        payload = str(state)
+    if len(payload.encode("utf-8")) > STATE_SIZE_LIMIT_BYTES:
+        raise ValueError(
+            f"ExecutionContext state exceeds {STATE_SIZE_LIMIT_BYTES} bytes after serialization; "
+            "store a compact summary instead of raw payloads."
+        )
+
+
+@dataclass(frozen=True)
+class ExecutionContext:
+    """Shared execution-scoped data passed across API, workflow, and node layers."""
+
+    trace_context: OtelContext | None = None
+    span: Span | None = None
+    logger = logger
+    state: Mapping[str, Any] = field(default_factory=dict)
+    metadata: Mapping[str, Any] = field(default_factory=dict)
+
+    def with_state(self, **updates: Any) -> ExecutionContext:
+        merged_state = {
+            key: _mask_sensitive_value(key, value)
+            for key, value in {**self.state, **updates}.items()
+        }
+        _validate_state_payload(merged_state)
+        return replace(self, state=merged_state)
+
+
+current_context: ContextVar[ExecutionContext | None] = ContextVar(
+    "current_execution_context",
+    default=None,
+)
+
+
+def get_current_context(
+    default: ExecutionContext | None = None,
+) -> ExecutionContext | None:
+    context = current_context.get()
+    if context is None:
+        return default
+    return context
+
+
+def set_current_context(context: ExecutionContext | None) -> Token:
+    return current_context.set(context)
+
+
+def reset_current_context(token: Token) -> None:
+    current_context.reset(token)
+
+
+@contextmanager
+def use_execution_context(context: ExecutionContext) -> Iterator[ExecutionContext]:
+    token = set_current_context(context)
+    try:
+        yield context
+    finally:
+        reset_current_context(token)
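
Following the constraints listed in the module docstring, a sketch of how ExecutionContext might be used at a workflow entrypoint (workflow and state names are illustrative):

# Hypothetical entrypoint usage of ExecutionContext; span handling is omitted for brevity.
import asyncio
from service_forge.execution_context import ExecutionContext, get_current_context, use_execution_context

async def node_step() -> None:
    ctx = get_current_context()
    if ctx is not None:
        ctx.logger.info(f"running with state={dict(ctx.state)}")

async def run_workflow() -> None:
    ctx = ExecutionContext(metadata={"workflow.name": "demo"})
    # with_state copies the context; sensitive keys are masked and the payload is size-checked.
    ctx = ctx.with_state(task_id="1234", api_key="super-secret")  # api_key is stored as "***"
    with use_execution_context(ctx):
        # Tasks created inside the context inherit it through the ContextVar.
        await asyncio.gather(node_step(), node_step())

# asyncio.run(run_workflow())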
service_forge/frontend/static/assets/CreateNewNodeDialog-DkrEMxSH.js

@@ -0,0 +1 @@
+ import"./api-DyiqpKJK.js";import{u as S,W as x}from"./WorkflowStudio-FCyhGD4y.js";/* empty css *//* empty css */import{u as B}from"./vue-flow-Dn7R8GPr.js";import{u as O}from"./WorkflowEditor-CcaGGbko.js";import{e as T,C as D,D as F,F as j,G as U,t as W}from"./element-ui-D3x2y3TA.js";import{v as h,i as G,c as f,r as K,w as M,J as v,y as r,K as s,U as a,a2 as _,B as p,x as R,H as A,a9 as H,k as I,u as J}from"./vue-core-DL-LgTX0.js";import{_ as L}from"./index-BMvodlwc.js";import"./ui-components-DQ7-U3pr.js";import"./js-yaml-yTPt38rv.js";import"./elkjs-Dm5QV7uy.js";import"./code-editor-DBSql_sc.js";const P={class:"type-info"},q=h({__name:"CreateNewNodeDialog",props:{pos:{}},setup(b){const y=b,l=G(x.CREATE_NEW_NODE_OPEN),d=S().nodeSchemaMap,N=B(),{addNewNode:E}=O(N),n=f(()=>[...d.values()]),t=K(n.value[0]?.name??"");M(n,()=>{n.value[0]&&(t.value=t.value||n.value[0]?.name)});const i=f(()=>d.get(t.value)),w=()=>{l&&(l.value=!1)},V=()=>{t.value&&(E(t.value,y.pos),l&&(l.value=!1))};return(z,e)=>{const C=F,g=D,u=U,m=j,c=T,k=W;return r(),v(k,{modelValue:J(l),"onUpdate:modelValue":e[1]||(e[1]=o=>I(l)?l.value=o:null),title:"新建节点 · 选择节点类型","align-center":""},{default:s(()=>[a(g,{modelValue:t.value,"onUpdate:modelValue":e[0]||(e[0]=o=>t.value=o),placeholder:n.value[0]?.name},{default:s(()=>[(r(!0),R(A,null,H(n.value,o=>(r(),v(C,{key:o.name,label:o.name,value:o.name},null,8,["label","value"]))),128))]),_:1},8,["modelValue","placeholder"]),p("div",P,[p("div",null,[e[2]||(e[2]=p("p",{class:"type-info-entry-title"},"输入",-1)),a(m,{data:Object.values(i.value?.inputs||{})},{default:s(()=>[a(u,{prop:"name",label:"端口名",width:"140"}),a(u,{prop:"type",label:"数据类型"}),a(u,{prop:"default_value",label:"默认值"})]),_:1},8,["data"])]),p("div",null,[e[3]||(e[3]=p("p",{class:"type-info-entry-title"},"输出",-1)),a(m,{data:Object.values(i.value?.outputs||{})},{default:s(()=>[a(u,{prop:"name",label:"端口名",width:"140"}),a(u,{prop:"type",label:"数据类型"})]),_:1},8,["data"])])])]),footer:s(()=>[a(c,{onClick:w},{default:s(()=>[...e[4]||(e[4]=[_("取消",-1)])]),_:1}),a(c,{type:"primary",onClick:V,disabled:!t.value},{default:s(()=>[...e[5]||(e[5]=[_(" 新建 ",-1)])]),_:1},8,["disabled"])]),_:1},8,["modelValue"])}}}),pe=L(q,[["__scopeId","data-v-ce5c662a"]]);export{pe as default};