alayaflow 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alayaflow/__init__.py +1 -1
- alayaflow/api/__init__.py +3 -0
- alayaflow/api/api_singleton.py +29 -11
- alayaflow/component/chat_model.py +2 -3
- alayaflow/component/llm_node.py +6 -15
- alayaflow/component/model/__init__.py +8 -0
- alayaflow/component/model/model_manager.py +60 -0
- alayaflow/component/model/schemas.py +33 -0
- alayaflow/component/retrieve_node.py +1 -7
- alayaflow/component/search_node.py +147 -0
- alayaflow/component/web_search.py +12 -12
- alayaflow/execution/env_manager.py +1 -0
- alayaflow/execution/executor_manager.py +5 -5
- alayaflow/execution/executors/base_executor.py +1 -1
- alayaflow/execution/executors/naive_executor.py +10 -12
- alayaflow/execution/executors/uv_executor.py +2 -2
- alayaflow/execution/executors/worker_executor.py +1 -1
- alayaflow/workflow/__init__.py +5 -1
- alayaflow/workflow/runnable/base_runnable_workflow.py +7 -5
- alayaflow/workflow/runnable/state_graph_runnable_workflow.py +17 -11
- alayaflow/workflow/workflow_loader.py +11 -7
- alayaflow/workflow/workflow_manager.py +18 -6
- {alayaflow-0.1.0.dist-info → alayaflow-0.1.2.dist-info}/METADATA +1 -1
- alayaflow-0.1.2.dist-info/RECORD +41 -0
- alayaflow-0.1.0.dist-info/RECORD +0 -37
- {alayaflow-0.1.0.dist-info → alayaflow-0.1.2.dist-info}/WHEEL +0 -0
- {alayaflow-0.1.0.dist-info → alayaflow-0.1.2.dist-info}/licenses/LICENSE +0 -0
alayaflow/__init__.py
CHANGED
alayaflow/api/__init__.py
CHANGED
alayaflow/api/api_singleton.py
CHANGED
@@ -1,9 +1,11 @@
-from typing import Generator, Dict, List, Self
+from typing import Generator, Dict, List, Self, Optional

 from alayaflow.utils.singleton import SingletonMeta
 from alayaflow.workflow import WorkflowManager
 from alayaflow.execution import ExecutorManager, ExecutorType
 from alayaflow.common.config import settings
+from alayaflow.component.model import ModelManager, ModelProfile
+from alayaflow.workflow.runnable import BaseRunnableWorkflow


 class APISingleton(metaclass=SingletonMeta):
@@ -12,6 +14,7 @@ class APISingleton(metaclass=SingletonMeta):
         self.executor_manager = ExecutorManager(
             workflow_manager=self.workflow_manager
         )
+        self.model_manager = ModelManager()
         self._inited = False

     def is_inited(self) -> bool:
@@ -21,17 +24,26 @@ class APISingleton(metaclass=SingletonMeta):
         if not self._inited:
             raise ValueError("Flow APISingleton 未初始化,请先调用 init 方法")

-    def init(self, config: dict =
+    def init(self, config: dict = None) -> Self:
         """初始化 Flow APISingleton"""
+        if config is None:
+            config = {}
         # Overwrite all for now
         settings.alayahub_url = config.get("alayahub_url", settings.alayahub_url)
         settings.langfuse_enabled = config.get("langfuse_enabled", settings.langfuse_enabled)
         settings.langfuse_public_key = config.get("langfuse_public_key", settings.langfuse_public_key)
         settings.langfuse_secret_key = config.get("langfuse_secret_key", settings.langfuse_secret_key)
         settings.langfuse_url = config.get("langfuse_url", settings.langfuse_url)
+
         self._inited = True
         return self

+    def register_models(self, model_profiles: List[dict | ModelProfile]) -> None:
+        self._check_init()
+        profiles = [ModelProfile(**profile) if isinstance(profile, dict) else profile for profile in model_profiles]
+        for profile in profiles:
+            self.model_manager.register_profile(profile)
+
     def list_loaded_workflow_ids(self) -> List[str]:
         """列举已加载的工作流"""
         self._check_init()
@@ -57,25 +69,31 @@ class APISingleton(metaclass=SingletonMeta):
         self._check_init()
         return self.workflow_manager.uninstall_workflow(workflow_id)

-    def load_workflow(self, workflow_id: str, version: str, init_args:
-        """
+    def load_workflow(self, workflow_id: str, version: str, init_args: Optional[Dict] = None, storage_path: str = settings.workflow_storage_path) -> None:
+        """加载本地工作流"""
         self._check_init()
-        return self.workflow_manager.load_workflow(workflow_id, version, init_args)
+        return self.workflow_manager.load_workflow(workflow_id, version, init_args or {}, storage_path)
+
+    def register_workflow(self, runnable: BaseRunnableWorkflow, requirements: List[str] = None) -> None:
+        """加载Python项目中的工作流"""
+        self._check_init()
+        self.workflow_manager.register_workflow(runnable, requirements or [])

     def exec_workflow(
         self,
         workflow_id: str,
         version: str,
-
-
+        inputs: dict,
+        context: dict,
         executor_type: str | ExecutorType = ExecutorType.NAIVE
     ) -> Generator[dict, None, None]:
         """执行工作流"""
         self._check_init()
-
+        for event in self.executor_manager.exec_workflow(
             workflow_id=workflow_id,
             version=version,
-
-
+            inputs=inputs,
+            context=context,
             executor_type=executor_type
-        )
+        ):
+            yield event
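Taken together, the api_singleton.py changes define the 0.1.2 entry-point API: optional-dict defaults for init and load_workflow, a model registry held on the singleton, and an exec_workflow that takes explicit inputs and context dicts and yields events. A minimal usage sketch follows; the workflow id, version, profile values and config keys are placeholders, and it assumes a workflow package is already available under the configured storage path.

from alayaflow.api.api_singleton import APISingleton

api = APISingleton().init({"alayahub_url": "http://localhost:8080"})  # keys mirror the settings overridden in init()

# Profiles may be plain dicts; register_models coerces them to ModelProfile.
api.register_models([{
    "name": "DeepSeek Chat",
    "model_id": "deepseek-chat-default",
    "model_name": "deepseek-chat",
    "base_url": "https://api.deepseek.com/v1",
    "api_key": "sk-placeholder",
}])

api.load_workflow("demo-workflow", "1.0.0")  # reads from settings.workflow_storage_path by default

for event in api.exec_workflow(
    workflow_id="demo-workflow",
    version="1.0.0",
    inputs={"query": "hello"},
    context={},
):
    print(event)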
alayaflow/component/chat_model.py
CHANGED
@@ -1,13 +1,12 @@
-import
+import os

 from langchain_openai import ChatOpenAI

-from alayaflow.common.config import settings

 def mk_chat_model_deepseek():
     return ChatOpenAI(
         model="deepseek-chat",
-        api_key="
+        api_key=os.getenv("DEEPSEEK_API_KEY"),
         base_url="https://api.deepseek.com/v1",
     )

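The net effect of this change is that the DeepSeek credential is no longer hardcoded in chat_model.py but read from the environment. A minimal sketch of calling the helper; the key value is a placeholder, and in practice DEEPSEEK_API_KEY would be set in the shell or deployment environment rather than in code:

import os

os.environ.setdefault("DEEPSEEK_API_KEY", "sk-placeholder")  # placeholder for illustration only

from alayaflow.component.chat_model import mk_chat_model_deepseek

llm = mk_chat_model_deepseek()   # ChatOpenAI pointed at https://api.deepseek.com/v1
# llm.invoke("ping")             # requires a real key and network access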
alayaflow/component/llm_node.py
CHANGED
@@ -2,13 +2,13 @@ from __future__ import annotations

 import json
 from enum import IntEnum
-from functools import cached_property
 from typing import Any, Dict, Optional

-from langchain_openai import ChatOpenAI
 from langchain_core.messages import SystemMessage, HumanMessage, AIMessage, BaseMessage
 from langchain_core.runnables import Runnable

+from alayaflow.component.model import ModelManager
+


 class ResponseFormat(IntEnum):
     TEXT = 0
@@ -19,7 +19,7 @@ class LLMComponent:
         self,
         *,
         # ===== 模型 & prompt =====
-
+        model_id: str,
         system_prompt: str,
         prompt: str,

@@ -35,7 +35,7 @@ class LLMComponent:
         retry_json_once: bool = True,
     ):
         # —— 配置即成员变量(= Spec)——
-        self.
+        self.model_id = model_id
         self.system_prompt = system_prompt
         self.prompt = prompt

@@ -47,14 +47,8 @@ class LLMComponent:
         self.json_schema = json_schema
         self.outputs = outputs or {}
         self.retry_json_once = retry_json_once
-
-
-    @cached_property
-    def llm(self) -> Runnable:
-        return ChatOpenAI(model=self.model_name, api_key="sk-4fe7cd96f5e948c79168025372e2327c", base_url="https://api.deepseek.com/v1")

     def _get_llm(self) -> Runnable:
-        llm = self.llm
         bind_kwargs: Dict[str, Any] = {}

         if self.temperature is not None:
@@ -64,11 +58,8 @@ class LLMComponent:
         if self.max_tokens is not None:
             bind_kwargs["max_tokens"] = self.max_tokens

-
-
-            llm = llm.bind(**bind_kwargs)
-        except Exception:
-            pass
+        model_manager = ModelManager()
+        llm = model_manager.get_model(self.model_id, runtime_config=bind_kwargs)

         return llm

alayaflow/component/model/model_manager.py
ADDED
@@ -0,0 +1,60 @@
+from typing import Dict, Optional, Any, Union
+from langchain_core.language_models import BaseChatModel
+from langchain_openai import ChatOpenAI
+
+from alayaflow.utils.singleton import SingletonMeta
+from alayaflow.component.model.schemas import ModelProfile, GenerationConfig
+
+
+class ModelManager(metaclass=SingletonMeta):
+    def __init__(self):
+        self._profiles: Dict[str, ModelProfile] = {}
+
+    def register_profile(self, profile: ModelProfile, override: bool = False) -> None:
+        if profile.model_id in self._profiles and not override:
+            raise ValueError(f"Profile with model ID {profile.model_id} already exists.")
+        self._profiles[profile.model_id] = profile
+
+    def get_model(
+        self,
+        model_id: str,
+        runtime_config: Optional[Union[GenerationConfig, Dict[str, Any]]] = None
+    ) -> BaseChatModel:
+        if model_id not in self._profiles:
+            raise ValueError(f"Model ID '{model_id}' not found. Please register it first.")
+
+        profile = self._profiles[model_id]
+
+        # Merge config
+
+        final_params = profile.default_config.model_dump(exclude={"extra_kwargs"})
+        extra_kwargs = profile.default_config.extra_kwargs.copy()
+
+        if runtime_config:
+            if isinstance(runtime_config, GenerationConfig):
+                override_dict = runtime_config.model_dump(exclude_unset=True, exclude={"extra_kwargs"})
+                final_params.update(override_dict)
+                extra_kwargs.update(runtime_config.extra_kwargs)
+            elif isinstance(runtime_config, dict):
+                final_params.update(runtime_config)
+
+        # Instantiate model
+
+        if profile.model_type == "OpenAI":
+            return ChatOpenAI(
+                model=profile.model_name,
+                openai_api_key=profile.api_key.get_secret_value(),
+                openai_api_base=profile.base_url,
+                # Generation parameters
+                temperature=final_params.get("temperature"),
+                max_tokens=final_params.get("max_tokens"),
+                top_p=final_params.get("top_p"),
+                frequency_penalty=final_params.get("frequency_penalty"),
+                presence_penalty=final_params.get("presence_penalty"),
+                model_kwargs=extra_kwargs
+            )
+        else:
+            raise ValueError(f"Unsupported model type: {profile.model_type}")
+
+    def get_profile(self, model_id: str) -> Optional[ModelProfile]:
+        return self._profiles.get(model_id)
alayaflow/component/model/schemas.py
ADDED
@@ -0,0 +1,33 @@
+from typing import Optional
+from pydantic import BaseModel, Field, ConfigDict, SecretStr
+
+
+class GenerationConfig(BaseModel):
+    model_config = ConfigDict(populate_by_name=True)
+
+    temperature: float = Field(default=0.7, ge=0.0, le=2.0, description="采样温度")
+    max_tokens: Optional[int] = Field(default=None, description="最大生成 Token 数")
+    top_p: float = Field(default=1.0, description="核采样阈值")
+    frequency_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
+    presence_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
+
+    extra_kwargs: dict = Field(default_factory=dict, description="其他参数")
+
+
+class ModelProfile(BaseModel):
+    model_config = ConfigDict(use_enum_values=True)
+
+    # Local used fields
+    name: str = Field(..., description="模型名称")
+    model_id: str = Field(..., description="系统内部使用的唯一 ID")
+    model_type: str = Field(default="OpenAI", description="模型类型工厂标识")
+    provider_name: str = Field(default="Unknown", description="提供商名称")
+
+    # Connection Credentials
+    model_name: str = Field(..., description="厂商的模型名称")
+    base_url: str = Field(..., description="API Base URL")
+    api_key: SecretStr = Field(default=SecretStr(""), description="API Key")
+
+    # Default Generation Config
+    default_config: GenerationConfig = Field(default_factory=GenerationConfig)
+
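These two new modules form the model registry that llm_node.py now resolves against: ModelProfile holds connection credentials and default generation settings, and ModelManager (a SingletonMeta singleton, so every component sees the same registry) merges per-call overrides over the profile's defaults before building a ChatOpenAI instance. A minimal sketch, with placeholder credentials and IDs:

from alayaflow.component.model.schemas import ModelProfile, GenerationConfig
from alayaflow.component.model.model_manager import ModelManager

profile = ModelProfile(
    name="DeepSeek Chat",
    model_id="deepseek-chat-default",          # internal lookup key used as LLMComponent.model_id
    model_name="deepseek-chat",                # vendor-side model name
    base_url="https://api.deepseek.com/v1",
    api_key="sk-placeholder",                  # coerced into SecretStr by pydantic
    default_config=GenerationConfig(temperature=0.2),
)

manager = ModelManager()                       # singleton: same instance the LLM node sees
manager.register_profile(profile)

# Runtime overrides (here a plain dict) are merged over default_config.
llm = manager.get_model("deepseek-chat-default", runtime_config={"max_tokens": 512})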
alayaflow/component/retrieve_node.py
CHANGED
@@ -8,10 +8,4 @@ class RetrieveComponent:

     def __call__(self, query: str, collection_name: str, limit: int = 3) -> list[str]:
         result = self.client.vdb_query([query], limit, collection_name)
-        return result.get('documents', [[]])[0] if result.get('documents') else []
-
-if __name__ == "__main__":
-    client = HttpAlayaMemClient("http://10.16.70.46:5555")
-    res = client.vdb_query(messages="姓名", limit=5, collection_name="file_watcher_collection")
-
-    print(res)
+        return result.get('documents', [[]])[0] if result.get('documents') else []
alayaflow/component/search_node.py
ADDED
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+import requests
+import urllib.parse
+from typing import List, Dict, Any, Optional, Literal
+
+class WebSearchJinaComponent:
+    def __init__(
+        self,
+        *,
+        api_key: str,
+
+        query: str,
+
+        top_k: int = 5,
+        site_limit: Optional[str] = None,
+        timeout: int = 60,
+
+        no_cache: bool = False,
+
+        response_format: Literal['markdown', 'html', 'text'] = 'markdown',
+
+        retain_images: Literal['none', 'all', 'occluded'] = 'none',
+
+        with_generated_alt: bool = True,
+
+        with_links_summary: bool = False,
+    ):
+        self.api_key = api_key
+        self.query = query
+        self.top_k = top_k
+        self.site_limit = site_limit
+        self.timeout = timeout
+
+        self.no_cache = no_cache
+        self.response_format = response_format
+        self.retain_images = retain_images
+        self.with_generated_alt = with_generated_alt
+        self.with_links_summary = with_links_summary
+
+        self.base_url = "https://s.jina.ai/"
+
+    def _construct_query(self) -> str:
+        final_query = self.query
+        if self.site_limit:
+            final_query = f"{final_query} site:{self.site_limit}"
+        return final_query
+
+    def _get_headers(self) -> Dict[str, str]:
+        headers = {
+            "Authorization": f"Bearer {self.api_key}",
+            "Accept": "application/json",
+
+            "X-Retain-Images": self.retain_images,
+            "X-Return-Format": self.response_format,
+            "X-With-Generated-Alt": "true" if self.with_generated_alt else "false",
+            "X-With-Links-Summary": "true" if self.with_links_summary else "false",
+        }
+
+        if self.no_cache:
+            headers["X-No-Cache"] = "true"
+
+        return headers
+
+    def __call__(self) -> List[Dict[str, Any]]:
+        query_str = self._construct_query()
+        encoded_query = urllib.parse.quote(query_str)
+        url = f"{self.base_url}{encoded_query}"
+
+        headers = self._get_headers()
+
+        try:
+            response = requests.get(url, headers=headers, timeout=self.timeout)
+
+            if response.status_code != 200:
+                print(f"Error: API returned status {response.status_code}")
+                print(f"Message: {response.text}")
+                return []
+            json_resp = response.json()
+
+            items = []
+            if isinstance(json_resp, dict):
+                if "data" in json_resp and isinstance(json_resp["data"], list):
+                    items = json_resp["data"]
+                elif "results" in json_resp:
+                    items = json_resp["results"]
+            elif isinstance(json_resp, list):
+                items = json_resp
+            if not items:
+                return []
+            results = items[:self.top_k]
+            clean_results = []
+            for item in results:
+                entry = {
+                    "title": item.get("title", "No Title"),
+                    "url": item.get("url", ""),
+                    "description": item.get("description", ""),
+                    "content": item.get("content", ""),
+                }
+                if self.with_links_summary and "links" in item:
+                    entry["links"] = item["links"]
+
+                clean_results.append(entry)
+
+            return clean_results
+
+        except requests.exceptions.Timeout:
+            print("Error: Search request timed out.")
+            return []
+        except requests.exceptions.RequestException as e:
+            print(f"Error calling Jina API: {e}")
+            return []
+        except Exception as e:
+            print(f"Unexpected error: {e}")
+            return []
+
+    def to_string_context(self) -> str:
+        results = self.__call__()
+        if not results:
+            return "No search results found."
+
+        parts = []
+        for i, item in enumerate(results, 1):
+            # limit content length to avoid overly long context
+            content_preview = item['content'][:2000] if item['content'] else item['description']
+
+            parts.append(
+                f"### Result {i}\n"
+                f"**Title**: {item['title']}\n"
+                f"**Source**: {item['url']}\n"
+                f"**Content**:\n{content_preview}\n"
+            )
+        return "\n\n".join(parts)
+
+if __name__ == "__main__":
+    import os
+    JINA_API_KEY = os.getenv("JINA_API_KEY")
+    search_query = "DeepSeek-V3 的技术架构特点"
+    search_node = WebSearchJinaComponent(
+        api_key=JINA_API_KEY,
+        query=search_query,
+        top_k=3
+    )
+    print(f"正在搜索: {search_query} ...")
+    search_context = search_node.to_string_context()
+    print("搜索完成,部分上下文预览:")
+    print(search_context + "...\n")
alayaflow/component/web_search.py
CHANGED
@@ -111,16 +111,16 @@ def get_config_schema() -> Type[TypedDict]:
     return WFConfig


-
-
-
-
-
-
-
-
-
-
-
-    print(f"result: {result}")
+if __name__ == "__main__":
+    # Example usage
+    graph = create_graph(search_api_key="your-api-key-here")
+    result = graph.invoke({
+        "query": "search query",
+    }, config={
+        "configurable": WFConfig(
+            search_api_key="your-api-key-here",
+            search_url="https://your-search-api-url",
+        )
+    })
+    print(f"result: {result}")

alayaflow/execution/executor_manager.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import
+from typing import Dict, Generator, Any
 from enum import Enum

 from alayaflow.execution.executors.base_executor import BaseExecutor
@@ -43,10 +43,10 @@ class ExecutorManager:
         self,
         workflow_id: str,
         version: str,
-
-
+        inputs: dict,
+        context: dict,
         executor_type: ExecutorType | str = ExecutorType.NAIVE
-    ) -> Generator[Dict, None, None]:
+    ) -> Generator[Dict[str, Any], None, None]:
         if isinstance(executor_type, str):
             executor_type = ExecutorType(executor_type)
         if executor_type not in self._executor_map:
@@ -55,5 +55,5 @@ class ExecutorManager:
             f"Supported kinds: {list(self._executor_map.keys())}"
         )
         executor = self._executor_map[executor_type]
-        yield from executor.execute_stream(workflow_id, version,
+        yield from executor.execute_stream(workflow_id, version, inputs, context)

alayaflow/execution/executors/base_executor.py
CHANGED
@@ -4,6 +4,6 @@ from typing import Generator, Dict

 class BaseExecutor(ABC):
     @abstractmethod
-    def execute_stream(self, workflow_id: str, version: str,
+    def execute_stream(self, workflow_id: str, version: str, inputs: dict, context: dict) -> Generator[Dict, None, None]:
         pass

alayaflow/execution/executors/naive_executor.py
CHANGED
@@ -36,8 +36,8 @@ class NaiveExecutor(BaseExecutor):
         self,
         workflow_id: str,
         version: str,
-
-
+        inputs: dict,
+        context: dict
     ) -> Generator[Dict, None, None]:

         # 1) resolve workflow
@@ -47,7 +47,7 @@ class NaiveExecutor(BaseExecutor):
             yield {"error": str(e), "workflow_id": workflow_id, "version": version}
             return

-        print(f"NaiveExecutor execute_stream: {workflow_id} {version} {
+        print(f"NaiveExecutor execute_stream: {workflow_id} {version} {inputs} {context}")

         # TODO: Support langflow workflow
         # Only support StateGraphRunnableWorkflow now.
@@ -60,7 +60,7 @@ class NaiveExecutor(BaseExecutor):

         def run_async_producer():
             try:
-                asyncio.run(self._produce_events_to_queue(runnable,
+                asyncio.run(self._produce_events_to_queue(runnable, inputs, context, event_queue))
             except Exception as e:
                 event_queue.put({"error": str(e), "traceback": traceback.format_exc(), "workflow_id": workflow_id, "version": version})
             finally:
@@ -95,20 +95,18 @@ class NaiveExecutor(BaseExecutor):
                 yield self._serialize_event(item)


-    async def _produce_events_to_queue(self, runnable: BaseRunnableWorkflow,
+    async def _produce_events_to_queue(self, runnable: BaseRunnableWorkflow, inputs: dict, context: dict, event_queue: queue.Queue):
         try:
             # Setup tracing
             tracing = get_tracing(settings)
             langfuse_cb = tracing.build_callback()
-
-            # Merge user_config and tracing config
-            merged_config = {
-                "configurable": user_config
-            }
+
             if langfuse_cb:
-
+                config = tracing.build_config(inputs, runnable.info, langfuse_cb)
+            else:
+                config = {}

-            async for chunk in runnable.
+            async for chunk in runnable.stream_events_async(inputs, context, config):
                 event_queue.put(chunk)  # Put each event immediately (real-time)
         except Exception as e:
             # If execution fails, put error event in queue
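The NaiveExecutor fragments above show the bridge this executor relies on: the async event stream from stream_events_async runs inside asyncio.run on a producer thread, and each event is pushed into a queue.Queue so that execute_stream can stay a plain synchronous generator. A standalone sketch of that pattern (not the executor's exact code, which is only partially visible in this diff):

import asyncio
import queue
import threading

_SENTINEL = object()

async def _produce(agen, q: queue.Queue):
    # Forward each item as soon as the async generator yields it.
    async for item in agen:
        q.put(item)

def iter_sync(agen_factory):
    """Consume an async generator from synchronous code, item by item."""
    q: queue.Queue = queue.Queue()

    def producer():
        try:
            asyncio.run(_produce(agen_factory(), q))
        finally:
            q.put(_SENTINEL)              # always unblock the consumer

    threading.Thread(target=producer, daemon=True).start()
    while (item := q.get()) is not _SENTINEL:
        yield item

async def numbers():                       # stand-in for a workflow event stream
    for i in range(3):
        await asyncio.sleep(0)
        yield i

print(list(iter_sync(numbers)))            # [0, 1, 2]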
alayaflow/execution/executors/uv_executor.py
CHANGED
@@ -28,8 +28,8 @@ class UvExecutor(BaseExecutor):
         self,
         workflow_id: str,
         version: str,
-
-
+        inputs: dict,
+        context: dict,
     ) -> Generator[Dict, None, None]:
         raise NotImplementedError("uv executor not supported yet")

alayaflow/execution/executors/worker_executor.py
CHANGED
@@ -7,6 +7,6 @@ class WorkerExecutor(BaseExecutor):
     def __init__(self, workflow_manager: WorkflowManager):
         self.workflow_manager = workflow_manager

-    def execute_stream(self, workflow_id: str, version: str,
+    def execute_stream(self, workflow_id: str, version: str, inputs: dict, context: dict) -> Generator[Dict, None, None]:
         raise NotImplementedError("worker executor not supported yet")

alayaflow/workflow/__init__.py
CHANGED
@@ -2,5 +2,9 @@ from alayaflow.workflow.workflow_info import WorkflowInfo
 from alayaflow.workflow.workflow_loader import WorkflowLoader
 from alayaflow.workflow.workflow_manager import WorkflowManager

-__all__ = [
+__all__ = [
+    "WorkflowInfo",
+    "WorkflowLoader",
+    "WorkflowManager",
+]

alayaflow/workflow/runnable/base_runnable_workflow.py
CHANGED
@@ -1,5 +1,7 @@
 from abc import ABC
-from typing import Dict,
+from typing import Dict, Any, AsyncGenerator
+
+from langgraph.graph.state import RunnableConfig

 from alayaflow.workflow.workflow_info import WorkflowInfo

@@ -11,9 +13,9 @@ class BaseRunnableWorkflow(ABC):
     def info(self) -> WorkflowInfo:
         return self._info

-    def invoke(self,
-        raise NotImplementedError("invoke method must be implemented in derived classes")
-
-    def astream_events(self, input_data: dict, user_config: dict) -> Generator[Dict, None, None]:
+    def invoke(self, inputs: dict, context: dict, config: RunnableConfig) -> dict:
         raise NotImplementedError("invoke method must be implemented in derived classes")

+    async def stream_events_async(self, inputs: dict, context: dict, config: RunnableConfig) -> AsyncGenerator[Dict[str, Any], None]:
+        raise NotImplementedError("stream_events_async method must be implemented in derived classes")
+
alayaflow/workflow/runnable/state_graph_runnable_workflow.py
CHANGED
@@ -1,7 +1,6 @@
-from
-from typing import Dict, Generator
+from typing import Dict, Any, AsyncGenerator

-from langgraph.graph.state import CompiledStateGraph
+from langgraph.graph.state import CompiledStateGraph, RunnableConfig

 from alayaflow.workflow.runnable.base_runnable_workflow import BaseRunnableWorkflow
 from alayaflow.workflow.workflow_info import WorkflowInfo
@@ -11,13 +10,20 @@ class StateGraphRunnableWorkflow(BaseRunnableWorkflow):
         super().__init__(info)
         self._graph = graph

-    def invoke(self,
-        return self._graph.invoke(
-            "configurable": user_config
-        })
+    def invoke(self, inputs: dict, context: dict, config: RunnableConfig) -> dict:
+        return self._graph.invoke(inputs, config, context=context)

-    def
-
-
-
+    async def stream_events_async(
+        self,
+        inputs: dict,
+        context: dict,
+        config: RunnableConfig
+    ) -> AsyncGenerator[Dict[str, Any], None]:
+        async for event in self._graph.astream_events(
+            inputs,
+            config,
+            version="v2",
+            context=context
+        ):
+            yield event

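Together with the base-class change, this fixes the runnable-workflow contract for 0.1.2: invoke(inputs, context, config) plus an async stream_events_async(inputs, context, config) that relays LangGraph astream_events v2 events. A sketch of a caller, assuming `runnable` is an already-constructed StateGraphRunnableWorkflow and the inputs shown are placeholders:

import asyncio

async def consume(runnable, inputs: dict, context: dict, config: dict) -> None:
    # Each event follows the LangGraph/LangChain v2 event schema
    # (e.g. "on_chain_start", "on_chat_model_stream", ...).
    async for event in runnable.stream_events_async(inputs, context, config):
        print(event.get("event"), event.get("name"))

# asyncio.run(consume(runnable, {"query": "hello"}, {}, {}))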
alayaflow/workflow/workflow_loader.py
CHANGED
@@ -105,8 +105,11 @@ class WorkflowLoader:

         return get_input_schema_func, create_graph_func

-    def load_workflow(self, init_args: dict =
+    def load_workflow(self, init_args: dict = None, force_reload: bool = False) -> BaseRunnableWorkflow:
         """加载工作流,返回包含 info 和 creator 的加载结果"""
+        if init_args is None:
+            init_args = {}
+
         info = self.load_info()

         # now we only support StateGraph
@@ -116,12 +119,14 @@ class WorkflowLoader:

         return runnable

-
+    @classmethod
+    def _sanitize_module_name(cls, wf_id: str, version: str) -> str:
         safe_version = version.replace(".", "_")
         safe_wf_id = re.sub(r'[^0-9a-zA-Z_]', '_', wf_id)
         return f"workflow.{safe_wf_id}.{safe_version}"
-
-
+
+    @classmethod
+    def _load_module(cls, module_name: str, filepath: Path) -> Any:
         """加载 Python 模块,支持相对导入"""
         try:
             # 工作流目录(包含 workflow.py 的目录)
@@ -151,7 +156,8 @@ class WorkflowLoader:
                 del sys.modules[module_name]
             raise ImportError(f"加载工作流模块失败: {e}") from e

-
+    @classmethod
+    def _resolve_entry_point(cls, module: Any, entry_point: str, workflow_id: str) -> Any:
         parts = entry_point.split('.')
         obj = module

@@ -164,5 +170,3 @@ class WorkflowLoader:
             obj = getattr(obj, part)

         return obj
-
-
alayaflow/workflow/workflow_manager.py
CHANGED
@@ -4,7 +4,7 @@ import shutil
 import tempfile

 from pathlib import Path
-from typing import
+from typing import Dict, List, Optional
 import requests

 from alayaflow.workflow.workflow_info import WorkflowKey, WorkflowInfo
@@ -24,11 +24,23 @@ class WorkflowManager:
         self._workflow_cache.clear()
         self._requirements_cache.clear()

-    def load_workflow(self, workflow_id: str, version: str, init_args:
-
-
-
-
+    def load_workflow(self, workflow_id: str, version: str, init_args: Optional[Dict] = None, storage_path: str = settings.workflow_storage_path) -> None:
+        if init_args is None:
+            init_args = {}
+        loader = WorkflowLoader(storage_path, workflow_id, version)
+        runnable = loader.load_workflow(init_args)
+        requirements = loader.load_requirements()
+        self._register_workflow(runnable, requirements)
+
+    def register_workflow(self, runnable: BaseRunnableWorkflow, requirements: List[str] = None) -> None:
+        self._register_workflow( runnable, requirements or [])
+
+    def _register_workflow(self, runnable: BaseRunnableWorkflow, requirements: List[str]) -> None:
+        info = runnable.info
+        key = WorkflowKey(info.id, info.version)
+        self._info_cache[key] = info
+        self._workflow_cache[key] = runnable
+        self._requirements_cache[key] = requirements

     # Keep for future reference
     # def load_workflows(self) -> None:
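WorkflowManager in 0.1.2 therefore exposes two registration paths that converge on _register_workflow: load_workflow reads a workflow package from a storage path via WorkflowLoader, while register_workflow accepts a BaseRunnableWorkflow built in the current Python process. A rough sketch of both paths through the APISingleton facade; ids, versions and paths are placeholders, and the in-process runnable is assumed to exist already:

from alayaflow.api.api_singleton import APISingleton

manager = APISingleton().init().workflow_manager   # the WorkflowManager held by the singleton

# Path 1: load a packaged workflow from local storage.
manager.load_workflow("demo-workflow", "1.0.0", init_args={}, storage_path="/tmp/workflows")

# Path 2: register a runnable built in this process (e.g. a StateGraphRunnableWorkflow).
# manager.register_workflow(runnable, requirements=["requests"])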
alayaflow-0.1.2.dist-info/RECORD
ADDED
@@ -0,0 +1,41 @@
+alayaflow/__init__.py,sha256=_EoEy39ORwhkwio9UhypCa7wCJSLmxv6reHH1Aqt5Kw,121
+alayaflow/api/__init__.py,sha256=y6nWgqC3jhOffTqixKlv3OU_NEAFxbzHSvwJrE5bHNs,187
+alayaflow/api/api_singleton.py,sha256=SvWeLHPYHT1ABACwmoNj2dWrFjpqLCpSMz1-KW3hsZQ,11640
+alayaflow/clients/alayamem/base_client.py,sha256=pyU2WF2jqNEgBEe8JOZSg13gHQ2pJcBgJ_6YP-5mWkw,540
+alayaflow/clients/alayamem/http_client.py,sha256=n0hAh_ddzEwFfNMsdw5s2dqvuMsyZNGOT-RcHsIXuEw,2171
+alayaflow/common/config.py,sha256=pi4zH_Pi0u6Fb8ZIs4u3qFOUOUeqxxUqqksoUp-hynM,3806
+alayaflow/component/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alayaflow/component/chat_model.py,sha256=GH2xcOhtvh3YqN2LMITmu1j-0_-t9d4hiMuRFctF2og,402
+alayaflow/component/intent_classifier.py,sha256=5KH52LIqIDpw2hlX4gi3Ff7SFVhenCFdFV-fXag0sDM,3765
+alayaflow/component/llm_node.py,sha256=rvj1f8gqKmdGZkM8Mo8z1PDafdpxhn52nT8L19ZxmaI,3510
+alayaflow/component/memory.py,sha256=Xl5ABW89dswC9oMwkeS6NFAZniKFw8BuGuLAAHddRRA,1448
+alayaflow/component/retrieve_node.py,sha256=mPzAiAXMCGWFYLnmU7kHuf9ejUhzpE_QSHJwc7LOlAU,451
+alayaflow/component/search_node.py,sha256=JNFD6qXDd2_NPpXQf7z8xklJFZs7UrFhWet43nrs25Y,4950
+alayaflow/component/web_search.py,sha256=HZp9j0X0YMBC_mhGqzi0g0pbvmlWiLiNdRxzbEFzl6s,3403
+alayaflow/component/langflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alayaflow/component/langflow/intent_classifier.py,sha256=0xqq2wpVVnEErgjOenKYHMsXMkCrHOlCpEZRJqw3PoM,2822
+alayaflow/component/model/__init__.py,sha256=v63w90tlcAIEy6cN0wTvnvx3afqwNYSn_fWI2U5fSwY,171
+alayaflow/component/model/model_manager.py,sha256=VTqkCLXxhGqSXi7GcBxaRHw3wTTfEk1rP31UMYvPW7Y,2503
+alayaflow/component/model/schemas.py,sha256=YADx6OGltGMzpUSNEYcUbGOrrJvtV6dFdAub3FfAw9w,1404
+alayaflow/execution/__init__.py,sha256=9gj_xgIJxlq3EuwsDMb5X05yP70HoD0_T25Khd495Xc,117
+alayaflow/execution/env_manager.py,sha256=-7npm1f-FNhHyOt8NZGbGA3i7JMHqQXoWV1uAe1bpyE,16744
+alayaflow/execution/executor_manager.py,sha256=_t_tr58yUsMbIW_xfevUXv8ba-7lGbBJFhjPsQ-ricM,2147
+alayaflow/execution/langfuse_tracing.py,sha256=BuRMHDH7Gub7CMkJM5ECLzs4vjy3VqAgzh2INE9zbOI,3882
+alayaflow/execution/workflow_runner.py,sha256=XEX4Em0Hv1sI8Im0lREjXq3fN1jYVwFnMMW3pphIAZk,3243
+alayaflow/execution/executors/__init__.py,sha256=RYwYg880smrZ8EX5iwVsJe0Rtgo8-tF82pY5jA3926g,412
+alayaflow/execution/executors/base_executor.py,sha256=mtBJM9bo_VLWAG9nnuq9xCjlD83bsvk1NPY5-aeD8TQ,254
+alayaflow/execution/executors/naive_executor.py,sha256=ICXslitv-9ONvJ3kLC-3-vTTBg1dWQlp0evCtxMO_MI,4749
+alayaflow/execution/executors/uv_executor.py,sha256=IIwP4j-BuDmfNoizuLsHcW0hRmzsLArWRVtVToX3_dM,4622
+alayaflow/execution/executors/worker_executor.py,sha256=niyTqsxB1iHvkuYb3xd35UwnsQllKul-Z6ikenJZ9Hk,513
+alayaflow/utils/singleton.py,sha256=5crFVfOkr9pU_j83ywqAMaL07BvVN5Ke_VGjT9qyUN0,432
+alayaflow/workflow/__init__.py,sha256=mzqmL4P7q7ixTp2b0rZE-5iEBh7vv4YaTyvqnQZKmos,267
+alayaflow/workflow/workflow_info.py,sha256=rnpAwYE4trhiv7o8LPmQpyQ3CDFfNN2yk1CLKRnWz0w,1259
+alayaflow/workflow/workflow_loader.py,sha256=0EP_SEgcGyvSKgWF3DDL4CEmy5NcxRyCpFV5yXzFviM,6729
+alayaflow/workflow/workflow_manager.py,sha256=dUFS6B5V64mdsopxrM6f3LvJn497eTBqMJaXokypBkI,11980
+alayaflow/workflow/runnable/__init__.py,sha256=sNybFeRxLwbDLHiZxlVFXsn3w2n1Jn0Mtun2W6fvjFU,257
+alayaflow/workflow/runnable/base_runnable_workflow.py,sha256=ap53fOeC5iUh2zm45LpEDjLJ4uqfO2C6FCN6WGm13kw,776
+alayaflow/workflow/runnable/state_graph_runnable_workflow.py,sha256=PMSHks46kmNM2uDVmf5TNcLW7AR6dgfJFohxs8Dcfm4,972
+alayaflow-0.1.2.dist-info/METADATA,sha256=CMWzmY6f6eWQNd6l8vC-PXnfIsSpoyDaNP18vlEGZZo,1925
+alayaflow-0.1.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+alayaflow-0.1.2.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+alayaflow-0.1.2.dist-info/RECORD,,
alayaflow-0.1.0.dist-info/RECORD
DELETED
@@ -1,37 +0,0 @@
-alayaflow/__init__.py,sha256=Twp-G7fdsTkTspyDl9VKx9yieHHF9b8BalhEtnqNKWs,121
-alayaflow/api/__init__.py,sha256=Y33eVzsA9v20VLpRK-v3DZOF-DsjfExag6C1taK2WRc,74
-alayaflow/api/api_singleton.py,sha256=5ffWXL4umEOD3VNBOL0X2c38-PjOEEeQkK5yWbTI1BU,10721
-alayaflow/clients/alayamem/base_client.py,sha256=pyU2WF2jqNEgBEe8JOZSg13gHQ2pJcBgJ_6YP-5mWkw,540
-alayaflow/clients/alayamem/http_client.py,sha256=n0hAh_ddzEwFfNMsdw5s2dqvuMsyZNGOT-RcHsIXuEw,2171
-alayaflow/common/config.py,sha256=pi4zH_Pi0u6Fb8ZIs4u3qFOUOUeqxxUqqksoUp-hynM,3806
-alayaflow/component/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-alayaflow/component/chat_model.py,sha256=aXBRDdtp0HOHlonwfXTzOIcjepjd5zBh8znlJpK9oVU,461
-alayaflow/component/intent_classifier.py,sha256=5KH52LIqIDpw2hlX4gi3Ff7SFVhenCFdFV-fXag0sDM,3765
-alayaflow/component/llm_node.py,sha256=URRqSj-snRjbPk-7S7uEd8DJk3L2srYe8Qslu8T6u28,3773
-alayaflow/component/memory.py,sha256=Xl5ABW89dswC9oMwkeS6NFAZniKFw8BuGuLAAHddRRA,1448
-alayaflow/component/retrieve_node.py,sha256=vJqK-5KC-w9ZE_cxA1hHDTWuA-vbPrvxJkionKfniYE,652
-alayaflow/component/web_search.py,sha256=PoUZV_H4vTDSwTqQarbmukQzpyUcBTatJxLZso-0Kok,3282
-alayaflow/component/langflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-alayaflow/component/langflow/intent_classifier.py,sha256=0xqq2wpVVnEErgjOenKYHMsXMkCrHOlCpEZRJqw3PoM,2822
-alayaflow/execution/__init__.py,sha256=9gj_xgIJxlq3EuwsDMb5X05yP70HoD0_T25Khd495Xc,117
-alayaflow/execution/env_manager.py,sha256=fQm8RO4HwEIfbqdwA4Qc76TgrJiBj39NCaJuy7XhaAk,16672
-alayaflow/execution/executor_manager.py,sha256=EwSlU6WnsRkhO0z2cxESwtsQnaIOjfmDazVyUob6DVA,2158
-alayaflow/execution/langfuse_tracing.py,sha256=BuRMHDH7Gub7CMkJM5ECLzs4vjy3VqAgzh2INE9zbOI,3882
-alayaflow/execution/workflow_runner.py,sha256=XEX4Em0Hv1sI8Im0lREjXq3fN1jYVwFnMMW3pphIAZk,3243
-alayaflow/execution/executors/__init__.py,sha256=RYwYg880smrZ8EX5iwVsJe0Rtgo8-tF82pY5jA3926g,412
-alayaflow/execution/executors/base_executor.py,sha256=yMP2Fw2uf6TCOj9axQtFApIZCSw94QOXuxvEpzy9UW0,257
-alayaflow/execution/executors/naive_executor.py,sha256=lLqjsHZBbXGUO2HMvr_0hYD9tgUKMWep7v0QBQUF8Lk,4892
-alayaflow/execution/executors/uv_executor.py,sha256=XCqECDdieBlZ36CcMSGVyml11Oi9p4HnJLhyapBDlfQ,4630
-alayaflow/execution/executors/worker_executor.py,sha256=o_O8RjguTifGye4avuBkhKGZcrB_xAAbVuMNve8isfY,521
-alayaflow/utils/singleton.py,sha256=5crFVfOkr9pU_j83ywqAMaL07BvVN5Ke_VGjT9qyUN0,432
-alayaflow/workflow/__init__.py,sha256=9IqNPAtWt7DzASmxg48iTRu-STymvUBd8_7-JsR2pgk,250
-alayaflow/workflow/workflow_info.py,sha256=rnpAwYE4trhiv7o8LPmQpyQ3CDFfNN2yk1CLKRnWz0w,1259
-alayaflow/workflow/workflow_loader.py,sha256=fJi7i714JRY5bESahLxcTNei_f-YEL5fnZtgHG7ChG4,6623
-alayaflow/workflow/workflow_manager.py,sha256=bfPGP1UWom4B2ZfuWyyKI0tfFxyn4j2_wBMECc8-Fu8,11536
-alayaflow/workflow/runnable/__init__.py,sha256=sNybFeRxLwbDLHiZxlVFXsn3w2n1Jn0Mtun2W6fvjFU,257
-alayaflow/workflow/runnable/base_runnable_workflow.py,sha256=gN8d2pUijugu1JZr3RrHS95ziu8Of401IQQtmTM6_lc,655
-alayaflow/workflow/runnable/state_graph_runnable_workflow.py,sha256=K2ahaGN9ubr9G2wDeBTQR_0sYo03SWhQbGVUjIb7w0U,843
-alayaflow-0.1.0.dist-info/METADATA,sha256=ribiSb6KxPEN_sDGa9Yd4v8w21jFzKegHvkD0448zsw,1925
-alayaflow-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-alayaflow-0.1.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-alayaflow-0.1.0.dist-info/RECORD,,
{alayaflow-0.1.0.dist-info → alayaflow-0.1.2.dist-info}/WHEEL
File without changes
{alayaflow-0.1.0.dist-info → alayaflow-0.1.2.dist-info}/licenses/LICENSE
File without changes