alayaflow 0.1.3__tar.gz → 0.1.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/alayamem_chat/1.0.0/metadata.json +9 -0
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/alayamem_chat/1.0.0/schemas.py +61 -0
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/alayamem_chat/1.0.0/workflow.py +246 -0
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/metadata.py +16 -0
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/requirements.txt +11 -0
- alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/workflow.py +49 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/PKG-INFO +1 -1
- alayaflow-0.1.4/examples/alayamem_demo.py +142 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/pyproject.toml +1 -1
- alayaflow-0.1.4/src/alayaflow/__init__.py +7 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/api/api_singleton.py +27 -2
- alayaflow-0.1.4/src/alayaflow/clients/alayamem/base_client.py +31 -0
- alayaflow-0.1.4/src/alayaflow/clients/alayamem/http_client.py +87 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/common/config.py +10 -8
- alayaflow-0.1.4/src/alayaflow/component/memory.py +52 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/executor_manager.py +19 -1
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/executors/base_executor.py +5 -1
- alayaflow-0.1.4/src/alayaflow/execution/executors/naive_executor.py +81 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/executors/uv_executor.py +10 -1
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/executors/worker_executor.py +3 -1
- alayaflow-0.1.4/src/alayaflow/utils/coroutine.py +20 -0
- alayaflow-0.1.4/src/alayaflow/utils/logger.py +79 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/uv.lock +8 -19
- alayaflow-0.1.3/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/workflow.py +0 -94
- alayaflow-0.1.3/src/alayaflow/__init__.py +0 -5
- alayaflow-0.1.3/src/alayaflow/clients/alayamem/base_client.py +0 -19
- alayaflow-0.1.3/src/alayaflow/clients/alayamem/http_client.py +0 -64
- alayaflow-0.1.3/src/alayaflow/component/memory.py +0 -50
- alayaflow-0.1.3/src/alayaflow/execution/executors/naive_executor.py +0 -119
- alayaflow-0.1.3/tests/clients/__init__.py +0 -1
- alayaflow-0.1.3/tests/clients/conftest.py +0 -9
- alayaflow-0.1.3/tests/clients/test_alayamem.py +0 -57
- {alayaflow-0.1.3/.alaya.ai/alayaflow/workflows/simple_chat → alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/alayamem_chat}/1.0.0/metadata.py +0 -0
- {alayaflow-0.1.3/.alaya.ai/alayaflow/workflows/simple_chat → alayaflow-0.1.4/.alaya.ai/alayaflow/workflows/alayamem_chat}/1.0.0/requirements.txt +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/metadata.json +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/metadata.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/requirements.txt +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/schemas.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/utils.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/autotable/1.0.0/workflow.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/metadata.json +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.alaya.ai/alayaflow/workflows/simple_chat/1.0.0/schemas.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.github/workflows/pr-test.yml +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/.gitignore +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/LICENSE +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/README.md +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/examples/autotable_demo.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/examples/chat_demo.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/pyproject.origin.toml +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/api/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/chat_model.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/intent_classifier.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/langflow/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/langflow/intent_classifier.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/llm_node.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/model/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/model/model_manager.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/model/schemas.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/retrieve_node.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/search_node.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/component/web_search.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/env_manager.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/executors/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/langfuse_tracing.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/execution/workflow_runner.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/utils/singleton.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/runnable/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/runnable/base_runnable_workflow.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/runnable/state_graph_runnable_workflow.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/workflow_info.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/workflow_loader.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/src/alayaflow/workflow/workflow_manager.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/component/test_intent_classifier.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/component/test_llm_node.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/execution/test_env_reuse.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/workflow/__init__.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/workflow/conftest.py +0 -0
- {alayaflow-0.1.3 → alayaflow-0.1.4}/tests/workflow/test_workflow_loader.py +0 -0
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from typing import TypedDict, List, Optional
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, Field
|
|
4
|
+
|
|
5
|
+
from langchain_core.messages import BaseMessage, AIMessageChunk
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class WorkflowInitArgs(BaseModel):
    """Arguments required at graph-build time (consumed by create_graph)."""

    alayamem_url: str = Field(..., description="AlayaMem URL")
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Input(BaseModel):
    """Workflow input payload: the conversation messages for this run."""

    messages: List[BaseMessage] = Field(..., description="List of input messages")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class WorkflowContext(BaseModel):
    """Per-run context supplied by the caller.

    Carries user/session identity, the chat model to use, and the prompt
    templates used by chat_node to assemble the system context. Each
    template's placeholder name is stated in its Field description.
    """

    user_id: str = Field(..., description="User ID")
    session_id: str = Field(..., description="Session ID")
    chat_model_id: str = Field(..., description="Chat Model ID")

    # Base system prompt prepended to every model call.
    system_prompt: str = Field(
        default="你是一个有帮助的AI助手。",
        description="系统基础提示词"
    )
    # Template for the user-profile context section ({user_profile} placeholder).
    user_profile_prompt: str = Field(
        default="## 用户画像\n{user_profile}",
        description="用户画像提示词模板,使用 {user_profile} 作为占位符"
    )
    # Template for the session-summary section ({session_summary} placeholder).
    session_summary_prompt: str = Field(
        default="## 会话摘要\n{session_summary}",
        description="会话摘要提示词模板,使用 {session_summary} 作为占位符"
    )
    # Template for the retrieved-documents section ({retrieved_docs} placeholder).
    retrieved_docs_prompt: str = Field(
        default="## 相关参考资料\n{retrieved_docs}",
        description="检索文档提示词模板,使用 {retrieved_docs} 作为占位符"
    )
    # Template for the conversation-history section ({history_turns} placeholder).
    history_turns_prompt: str = Field(
        default="## 历史对话\n{history_turns}",
        description="历史对话提示词模板,使用 {history_turns} 作为占位符"
    )
    # Wrapper template that joins all context sections ({context_parts} placeholder).
    context_wrapper_prompt: str = Field(
        default="# 上下文信息\n\n{context_parts}\n\n请基于以上信息来回答用户的问题。",
        description="上下文包装提示词模板,使用 {context_parts} 作为占位符"
    )
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class Output(BaseModel):
    """Workflow output payload: the final model response produced by chat_node."""

    chat_response: dict = Field(..., description="Chat response")
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class WorkflowState(TypedDict):
    """Mutable state threaded through the langgraph nodes."""

    messages: List[BaseMessage]          # conversation input messages
    memory_initialized: bool             # NOTE(review): not written by any visible node — confirm use
    retrieved_docs: Optional[List[str]]  # documents set by query_file_node
    stream_chunks: List[AIMessageChunk]  # streamed model chunks
    chat_response: Optional[dict]        # final response set by chat_node
    context: Optional[str]               # assembled context text
    # Newly added fields
    history_turns: Optional[List[dict]]  # recent turns set by turns_node
    session_summary: Optional[str]       # summary text set by summary_node
    user_profile: Optional[dict]         # profile dict set by profile_node
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
from langgraph.graph import StateGraph, START, END
|
|
2
|
+
from langgraph.runtime import Runtime
|
|
3
|
+
|
|
4
|
+
from alayaflow.component.memory import query_file, commit_turn, turns, summary, profile
|
|
5
|
+
from alayaflow.component.model import ModelManager
|
|
6
|
+
|
|
7
|
+
from .schemas import WorkflowInitArgs, WorkflowState, WorkflowContext, Input, Output
|
|
8
|
+
|
|
9
|
+
def mk_turns_node(alayamem_url: str):
    """Build a graph node that loads recent conversation turns from AlayaMem."""

    def turns_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        ctx = runtime.context
        result = turns(alayamem_url, ctx.session_id, ctx.user_id)

        # Debug output: dump the raw turns-endpoint response.
        print(f"\n[1. turns 接口返回]")
        print(f" 返回类型: {type(result)}")
        print(f" 返回值: {result}")

        new_state = state.copy()
        # The turns endpoint returns a plain list; anything else means "no history".
        new_state["history_turns"] = result if isinstance(result, list) else []
        return new_state

    return turns_node
|
|
25
|
+
|
|
26
|
+
def mk_query_file_node(alayamem_url: str):
    """Build a graph node that retrieves documents relevant to the latest message."""

    def query_file_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        msgs = state.get("messages", [])
        latest = msgs[-1] if msgs else None
        result = query_file(alayamem_url, runtime.context.user_id, latest)

        # Debug output: dump the raw query_file-endpoint response.
        print(f"\n[2. query_file 接口返回]")
        print(f" 返回类型: {type(result)}")
        if isinstance(result, dict):
            print(f" 字段列表: {list(result.keys())}")
        print(f" 返回值: {result}")

        new_state = state.copy()
        # query_file returns a dict whose "results" dict carries a "documents" list;
        # any other shape normalizes to an empty document list.
        docs = []
        if isinstance(result, dict):
            inner = result.get("results", {})
            if isinstance(inner, dict):
                docs = inner.get("documents", [])
        new_state["retrieved_docs"] = docs
        return new_state

    return query_file_node
|
|
49
|
+
|
|
50
|
+
def mk_summary_node(alayamem_url: str):
    """Build a graph node that fetches the session summary from AlayaMem."""
    def summary_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        session_id = runtime.context.session_id
        user_id = runtime.context.user_id
        original_result = summary(alayamem_url, session_id, user_id)

        # Debug output: dump the raw summary-endpoint response.
        # NOTE(review): this label reuses "[2." (same as query_file) while
        # profile/commit use "[3."/"[4." — confirm the intended numbering.
        print(f"\n[2. summary 接口返回]")
        print(f" 返回类型: {type(original_result)}")
        if isinstance(original_result, dict):
            print(f" 字段列表: {list(original_result.keys())}")
        print(f" 返回值: {original_result}")

        updated_state = state.copy()
        # "summary" may be absent or explicitly None; normalize to "" so the
        # downstream prompt assembly never formats None.
        if isinstance(original_result, dict):
            summary_value = original_result.get("summary", "")
            updated_state["session_summary"] = summary_value if summary_value is not None else ""
        else:
            updated_state["session_summary"] = ""
        return updated_state
    return summary_node
|
|
72
|
+
|
|
73
|
+
def mk_profile_node(alayamem_url: str):
    """Build a graph node that fetches the user profile from AlayaMem."""

    def profile_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        result = profile(alayamem_url, runtime.context.user_id)

        # Debug output: dump the raw profile-endpoint response.
        print(f"\n[3. profile 接口返回]")
        print(f" 返回类型: {type(result)}")
        if isinstance(result, dict):
            print(f" 字段列表: {list(result.keys())}")
        print(f" 返回值: {result}")

        new_state = state.copy()
        # "profile" may be absent or explicitly None; normalize to {}.
        value = result.get("profile", {}) if isinstance(result, dict) else {}
        new_state["user_profile"] = value if value is not None else {}
        return new_state

    return profile_node
|
|
94
|
+
|
|
95
|
+
def mk_commit_turn_node(alayamem_url: str):
    """Build a graph node that persists the latest user/assistant turn to AlayaMem."""
    def commit_turn_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        session_id = runtime.context.session_id
        user_id = runtime.context.user_id
        messages = state.get("messages", [])
        chat_response = state.get("chat_response")

        # Extract the user's latest message text; messages may be LangChain
        # message objects (have .content) or plain dicts.
        user_text = ""
        if messages:
            last_msg = messages[-1]
            if hasattr(last_msg, 'content'):
                user_text = last_msg.content
            elif isinstance(last_msg, dict):
                user_text = last_msg.get('content', '')

        # Extract the assistant's reply text from chat_node's result (same duality).
        assistant_text = ""
        if chat_response:
            if hasattr(chat_response, 'content'):
                assistant_text = chat_response.content
            elif isinstance(chat_response, dict):
                assistant_text = chat_response.get('content', '')

        # Persist the turn. window_size=3 — presumably a summarization/window
        # hint; TODO confirm against the AlayaMem commit_turn API.
        original_result = commit_turn(alayamem_url, session_id, user_text,
                                      assistant_text, user_id, window_size=3)

        # Debug output: dump the raw commit_turn response. State is returned
        # unchanged — this node is write-only from the graph's perspective.
        print(f"\n[4. commit_turn 接口返回]")
        print(f" 返回类型: {type(original_result)}")
        if isinstance(original_result, dict):
            print(f" 字段列表: {list(original_result.keys())}")
        print(f" 返回值: {original_result}")

        return state.copy()
    return commit_turn_node
|
|
131
|
+
|
|
132
|
+
def mk_chat_node():
    """Build the chat node: assemble system context from memory/retrieval state,
    call the configured chat model, and store the response in the state."""
    model_manager = ModelManager()

    def chat_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        model_id = runtime.context.chat_model_id
        chat_model = model_manager.get_model(model_id)
        if not chat_model:
            raise ValueError(f"无法找到模型ID为 '{model_id}' 的模型配置")

        from langchain_core.messages import SystemMessage, HumanMessage, AIMessage

        # Fetch the prompt-template configuration from the run context.
        ctx = runtime.context
        system_prompt = ctx.system_prompt
        user_profile_prompt = ctx.user_profile_prompt
        session_summary_prompt = ctx.session_summary_prompt
        retrieved_docs_prompt = ctx.retrieved_docs_prompt
        history_turns_prompt = ctx.history_turns_prompt
        context_wrapper_prompt = ctx.context_wrapper_prompt

        # Build the full message list sent to the model.
        full_messages = []

        # Base system prompt first.
        if system_prompt:
            full_messages.append(SystemMessage(content=system_prompt))

        # Assemble the optional context sections.
        context_parts = []

        # User profile section is currently disabled.
        # NOTE(review): profile_node still fetches user_profile into state and
        # user_profile_prompt is still configured — confirm whether this is
        # intentionally disabled or should be re-enabled.
        # user_profile = state.get("user_profile", {})
        # if user_profile:
        #     profile_text = user_profile_prompt.format(user_profile=str(user_profile))
        #     context_parts.append(profile_text)

        # Session summary section.
        session_summary = state.get("session_summary", "")
        if session_summary:
            summary_text = session_summary_prompt.format(session_summary=session_summary)
            context_parts.append(summary_text)

        # Retrieved-documents section.
        retrieved_docs = state.get("retrieved_docs", [])
        if retrieved_docs:
            docs_content = "\n\n".join([str(doc) for doc in retrieved_docs])
            docs_text = retrieved_docs_prompt.format(retrieved_docs=docs_content)
            context_parts.append(docs_text)

        # Conversation-history section; turns are dicts with
        # user_text/assistant_text keys (as produced by AlayaMem's turns API —
        # TODO confirm key names against the server).
        history_turns = state.get("history_turns", [])
        if history_turns:
            history_content = "\n".join([
                f"用户: {turn.get('user_text', '')}\n助手: {turn.get('assistant_text', '')}"
                for turn in history_turns
            ])
            history_text = history_turns_prompt.format(history_turns=history_content)
            context_parts.append(history_text)

        # If any section was produced, wrap them in one context system message.
        if context_parts:
            context_content = context_wrapper_prompt.format(
                context_parts="\n\n".join(context_parts)
            )
            full_messages.append(SystemMessage(content=context_content))

        # Finally append the caller's current messages.
        current_messages = state["messages"]
        full_messages.extend(current_messages)

        # Debug output: print the exact model input.
        print(f"\n[5. 模型输入消息]")
        print(f" 消息数量: {len(full_messages)}")
        print("-" * 50)
        full_content = ""
        for msg in full_messages:
            msg_type = type(msg).__name__
            content = msg.content if hasattr(msg, 'content') else str(msg)
            full_content += f"[{msg_type}]\n{content}\n\n"
        print(full_content)
        print("-" * 50)

        # Invoke the model and store the response.
        response = chat_model.invoke(full_messages)

        updated_state = state.copy()
        updated_state['chat_response'] = response
        return updated_state
    return chat_node
|
|
221
|
+
|
|
222
|
+
def create_graph(init_args: WorkflowInitArgs | dict):
    """Build and compile the alayamem_chat workflow graph.

    Args:
        init_args: WorkflowInitArgs (or an equivalent dict) carrying the
            AlayaMem service URL.

    Returns:
        The compiled langgraph graph.
    """
    if isinstance(init_args, dict):
        init_args = WorkflowInitArgs(**init_args)
    alayamem_url = init_args.alayamem_url

    graph = StateGraph(WorkflowState, WorkflowContext, input_type=Input, output_type=Output)

    # Register nodes.
    graph.add_node("turns_node", mk_turns_node(alayamem_url))
    graph.add_node("query_file_node", mk_query_file_node(alayamem_url))
    graph.add_node("summary_node", mk_summary_node(alayamem_url))
    graph.add_node("profile_node", mk_profile_node(alayamem_url))
    graph.add_node("chat_node", mk_chat_node())
    graph.add_node("commit_turn_node", mk_commit_turn_node(alayamem_url))

    # Pipeline: history -> retrieval -> summary -> profile -> chat -> commit.
    # BUG FIX: query_file_node was registered but never connected by any edge,
    # so chat_node's retrieved_docs stayed empty and langgraph's compile-time
    # validation rejects the orphan node. It is now wired into the chain.
    graph.add_edge(START, "turns_node")
    graph.add_edge("turns_node", "query_file_node")
    graph.add_edge("query_file_node", "summary_node")
    graph.add_edge("summary_node", "profile_node")
    graph.add_edge("profile_node", "chat_node")
    graph.add_edge("chat_node", "commit_turn_node")
    graph.add_edge("commit_turn_node", END)

    return graph.compile()
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from alayaflow.workflow import WorkflowInfo
|
|
4
|
+
|
|
5
|
+
def get_metadata():
    """Return the WorkflowInfo descriptor for the simple_chat workflow."""
    return WorkflowInfo(
        id="simple_chat",
        name="Simple Chatbot",
        description="一个简单的 LLM 对话工作流示例",
        version="1.0.0",
        tags=["chat", "basic"],
        entry_file="workflow.py",
        entry_point="create_graph",
        wf_dir=Path(__file__).parent,
    )
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
from langgraph.graph import StateGraph, START, END
|
|
2
|
+
from langgraph.runtime import Runtime
|
|
3
|
+
|
|
4
|
+
from alayaflow.component.model import ModelManager
|
|
5
|
+
|
|
6
|
+
from .schemas import WorkflowInitArgs, WorkflowState, WorkflowContext, Input, Output
|
|
7
|
+
|
|
8
|
+
def mk_chat_node():
    """Build the chat node: optionally prepend retrieved context, then invoke the model."""
    model_manager = ModelManager()

    def chat_node(state: WorkflowState, runtime: Runtime[WorkflowContext]):
        model_id = runtime.context.chat_model_id
        chat_model = model_manager.get_model(model_id)
        if not chat_model:
            raise ValueError(f"无法找到模型ID为 '{model_id}' 的模型配置")

        prompt_messages = list(state["messages"])

        # When retrieval results are present, prepend them as a system message.
        docs = state.get("retrieved_docs", [])
        if docs:
            from langchain_core.messages import SystemMessage
            joined = "\n\n".join(str(d) for d in docs)
            prompt_messages.insert(
                0,
                SystemMessage(
                    content=f"以下是相关的参考资料,请基于这些资料回答用户的问题:\n\n{joined}"
                ),
            )

        result = state.copy()
        result['chat_response'] = chat_model.invoke(prompt_messages)
        return result

    return chat_node
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def create_graph(init_args: WorkflowInitArgs | dict):
    """Build and compile the simple_chat workflow graph.

    Args:
        init_args: WorkflowInitArgs (or an equivalent dict). Validated for
            interface consistency with the other workflows, although this
            simple workflow does not call AlayaMem.

    Returns:
        The compiled langgraph graph.
    """
    # Normalize/validate init args (keeps the same contract as other workflows).
    if isinstance(init_args, dict):
        init_args = WorkflowInitArgs(**init_args)
    # CLEANUP: init_args.alayamem_url was bound to an unused local; removed.

    graph = StateGraph(WorkflowState, WorkflowContext, input_type=Input, output_type=Output)

    graph.add_node("chat_node", mk_chat_node())

    graph.add_edge(START, "chat_node")
    graph.add_edge("chat_node", END)

    return graph.compile()
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
from dotenv import load_dotenv
|
|
5
|
+
from alayaflow.api import Flow
|
|
6
|
+
|
|
7
|
+
# Load environment variables from .env file
|
|
8
|
+
load_dotenv()
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def init_flow():
    """Initialize the Flow API singleton and register the demo chat model."""
    # Flow() is a singleton: constructing it anywhere yields the same instance.
    flow = Flow()

    flow.init({
        "alayahub_url": "http://your-alayahub-url",
    })

    deepseek_profile = {
        # Local used fields
        "name": "DeepSeek Chat",
        "model_id": "deepseek-chat",
        "provider_name": "DeepSeek",
        # Connection credentials
        "model_name": "deepseek-chat",
        "base_url": "https://api.deepseek.com/v1",
        "api_key": os.getenv("DEEPSEEK_API_KEY"),
    }
    flow.register_models([deepseek_profile])
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def init_workflows():
    """Load the alayamem_chat workflow into the Flow singleton.

    Failures are printed with a traceback but not re-raised, preserving the
    demo's best-effort behavior (main() continues regardless).
    """
    flow = Flow()

    workflow_id = 'alayamem_chat'
    workflow_version = '1.0.0'
    init_args = {
        "alayamem_url": "http://localhost:5555",
    }

    print(f"\n正在加载工作流: {workflow_id} v{workflow_version}")
    try:
        # CLEANUP: the return value was bound to an unused `result` local and
        # the exception to an unused `e`; both removed.
        flow.load_workflow(workflow_id, workflow_version, init_args)
    except Exception:
        import traceback
        traceback.print_exc()
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def run_single_turn(flow, workflow_id, workflow_version, question, context):
    """Execute one conversation turn, streaming the answer to stdout.

    Args:
        flow: The initialized Flow singleton.
        workflow_id: Workflow identifier to execute.
        workflow_version: Workflow version string.
        question: The user's question for this turn.
        context: Runtime context dict (user/session/model ids).

    Returns:
        True if the turn completed, False if an error event was received.
    """
    inputs = {
        "messages": [{"role": "user", "content": question}]
    }

    def handle_chat_model_stream(event):
        # assumes event["data"]["chunk"] is subscriptable with 'content'
        # (dict-like chunk) — TODO confirm against the executor's event schema
        content = event["data"]["chunk"]['content']
        if content:
            print(content, end="", flush=True)

    def handle_error(event):
        sys.stderr.write(f"\n[Error Event Detected]\n")
        sys.stderr.write("--- Error message ---\n")
        sys.stderr.write(f"{event.get('error', 'Unknown error')}\n")
        if 'traceback' in event:
            sys.stderr.write("\n--- Stack Trace ---\n")
            sys.stderr.write(f"{event['traceback']}\n")
        # Dump the complete event to aid debugging.
        sys.stderr.write("\n--- Complete Event ---\n")
        sys.stderr.write(f"{event}\n")

    for event in flow.exec_workflow(workflow_id, workflow_version, inputs, context):
        if "error" in event:
            handle_error(event)
            return False
        kind = event["event"]
        if kind == "on_chat_model_stream":
            handle_chat_model_stream(event)
    print()
    return True
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def run_workflow():
    """Run a scripted multi-turn conversation to exercise AlayaMem's memory."""
    # Reuse the already-initialized Flow singleton.
    flow = Flow()

    workflow_id = 'alayamem_chat'
    workflow_version = '1.0.0'
    context = {
        "user_id": "kekeke",
        "session_id": "test_session_multi_turn",
        "chat_model_id": "deepseek-chat",
    }

    # Scripted questions; the later ones only work if memory is functioning.
    questions = [
        "你是谁",
        "介绍一下transformers",
        "我问了你几个问题",
        "你觉得我是做什么工作的",
    ]

    bar = "=" * 80
    print("\n" + bar)
    print("开始多轮对话测试 - 测试 AlayaMem 记忆功能")
    print(bar)

    for turn_no, question in enumerate(questions, 1):
        print(f"\n{bar}")
        print(f"第 {turn_no} 轮对话")
        print(bar)
        print(f"\n用户: {question}")
        print(f"助手: ", end="")

        ok = run_single_turn(flow, workflow_id, workflow_version, question, context)
        if not ok:
            print(f"\n第 {turn_no} 轮对话失败,停止测试")
            break

        print(f"\n{'-' * 80}")

    print(f"\n{bar}")
    print("多轮对话测试完成")
    print(f"{bar}\n")
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def main():
    """Demo entry point: initialize Flow, load workflows, run the chat test."""
    init_flow()
    init_workflows()

    # Debug aid: list loaded workflows.
    # BUG FIX: the original guarded hasattr(flow, '_workflow_manager') but then
    # dereferenced ._workflows unguarded, so a manager without that private
    # attribute would raise AttributeError. Guard both lookups.
    flow = Flow()
    manager = getattr(flow, '_workflow_manager', None)
    workflows = getattr(manager, '_workflows', None)
    print(f"已加载的工作流: {workflows.keys() if workflows is not None else 'Unknown'}")

    run_workflow()
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
if __name__ == "__main__":
|
|
142
|
+
main()
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from typing import Generator, Dict, List, Self, Optional
|
|
1
|
+
from typing import Generator, Dict, List, Self, Optional, AsyncGenerator
|
|
2
2
|
|
|
3
3
|
from alayaflow.utils.singleton import SingletonMeta
|
|
4
4
|
from alayaflow.workflow import WorkflowManager
|
|
@@ -6,6 +6,10 @@ from alayaflow.execution import ExecutorManager, ExecutorType
|
|
|
6
6
|
from alayaflow.common.config import settings
|
|
7
7
|
from alayaflow.component.model import ModelManager, ModelProfile
|
|
8
8
|
from alayaflow.workflow.runnable import BaseRunnableWorkflow
|
|
9
|
+
from alayaflow.utils.logger import AlayaFlowLogger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
logger = AlayaFlowLogger()
|
|
9
13
|
|
|
10
14
|
|
|
11
15
|
class APISingleton(metaclass=SingletonMeta):
|
|
@@ -15,7 +19,7 @@ class APISingleton(metaclass=SingletonMeta):
|
|
|
15
19
|
workflow_manager=self.workflow_manager
|
|
16
20
|
)
|
|
17
21
|
self.model_manager = ModelManager()
|
|
18
|
-
self._inited = False
|
|
22
|
+
self._inited = False
|
|
19
23
|
|
|
20
24
|
def is_inited(self) -> bool:
|
|
21
25
|
return self._inited
|
|
@@ -34,6 +38,8 @@ class APISingleton(metaclass=SingletonMeta):
|
|
|
34
38
|
settings.langfuse_public_key = config.get("langfuse_public_key", settings.langfuse_public_key)
|
|
35
39
|
settings.langfuse_secret_key = config.get("langfuse_secret_key", settings.langfuse_secret_key)
|
|
36
40
|
settings.langfuse_url = config.get("langfuse_url", settings.langfuse_url)
|
|
41
|
+
|
|
42
|
+
logger.info(f"AlayaFlow is initialized with config: {settings.model_dump()}")
|
|
37
43
|
|
|
38
44
|
self._inited = True
|
|
39
45
|
return self
|
|
@@ -97,3 +103,22 @@ class APISingleton(metaclass=SingletonMeta):
|
|
|
97
103
|
executor_type=executor_type
|
|
98
104
|
):
|
|
99
105
|
yield event
|
|
106
|
+
|
|
107
|
+
async def exec_workflow_async(
    self,
    workflow_id: str,
    version: str,
    inputs: dict,
    context: dict,
    executor_type: str | ExecutorType = ExecutorType.NAIVE
) -> AsyncGenerator[dict, None]:
    """Execute a workflow asynchronously, yielding execution events.

    Args:
        workflow_id: Identifier of the workflow to run.
        version: Workflow version string.
        inputs: Input payload passed to the workflow.
        context: Runtime context (e.g. user/session identifiers).
        executor_type: Executor backend to use; defaults to NAIVE.

    Yields:
        Event dicts produced by the executor as the workflow runs.
    """
    self._check_init()  # fail fast if init() was never called on the singleton
    # Delegate to the executor manager and re-yield its event stream unchanged.
    async for event in self.executor_manager.exec_workflow_async(
        workflow_id=workflow_id,
        version=version,
        inputs=inputs,
        context=context,
        executor_type=executor_type
    ):
        yield event