alayaflow 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alayaflow/__init__.py +5 -0
- alayaflow/api/__init__.py +5 -0
- alayaflow/api/api_singleton.py +81 -0
- alayaflow/clients/alayamem/base_client.py +19 -0
- alayaflow/clients/alayamem/http_client.py +64 -0
- alayaflow/common/config.py +106 -0
- alayaflow/component/__init__.py +0 -0
- alayaflow/component/chat_model.py +20 -0
- alayaflow/component/intent_classifier.py +94 -0
- alayaflow/component/langflow/__init__.py +0 -0
- alayaflow/component/langflow/intent_classifier.py +83 -0
- alayaflow/component/llm_node.py +123 -0
- alayaflow/component/memory.py +50 -0
- alayaflow/component/retrieve_node.py +17 -0
- alayaflow/component/web_search.py +126 -0
- alayaflow/execution/__init__.py +6 -0
- alayaflow/execution/env_manager.py +424 -0
- alayaflow/execution/executor_manager.py +59 -0
- alayaflow/execution/executors/__init__.py +9 -0
- alayaflow/execution/executors/base_executor.py +9 -0
- alayaflow/execution/executors/naive_executor.py +121 -0
- alayaflow/execution/executors/uv_executor.py +125 -0
- alayaflow/execution/executors/worker_executor.py +12 -0
- alayaflow/execution/langfuse_tracing.py +104 -0
- alayaflow/execution/workflow_runner.py +98 -0
- alayaflow/utils/singleton.py +14 -0
- alayaflow/workflow/__init__.py +6 -0
- alayaflow/workflow/runnable/__init__.py +7 -0
- alayaflow/workflow/runnable/base_runnable_workflow.py +19 -0
- alayaflow/workflow/runnable/state_graph_runnable_workflow.py +23 -0
- alayaflow/workflow/workflow_info.py +50 -0
- alayaflow/workflow/workflow_loader.py +168 -0
- alayaflow/workflow/workflow_manager.py +257 -0
- alayaflow-0.1.0.dist-info/METADATA +99 -0
- alayaflow-0.1.0.dist-info/RECORD +37 -0
- alayaflow-0.1.0.dist-info/WHEEL +4 -0
- alayaflow-0.1.0.dist-info/licenses/LICENSE +661 -0
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import json
|
|
3
|
+
import importlib.util
|
|
4
|
+
import re
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, List, Callable
|
|
7
|
+
from functools import cache
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
|
|
11
|
+
from alayaflow.workflow.runnable import StateGraphRunnableWorkflow
|
|
12
|
+
from alayaflow.workflow.workflow_info import WorkflowInfo
|
|
13
|
+
from alayaflow.workflow.runnable import BaseRunnableWorkflow
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class WorkflowLoader:
|
|
17
|
+
def __init__(self, workflows_dir: Path | str, workflow_id: str, version: str) -> None:
|
|
18
|
+
workflows_dir = Path(workflows_dir) if isinstance(workflows_dir, str) else workflows_dir
|
|
19
|
+
self.workflow_dir = workflows_dir / workflow_id / version
|
|
20
|
+
|
|
21
|
+
@cache
|
|
22
|
+
def load_info(self) -> WorkflowInfo:
|
|
23
|
+
metadata_file = self.workflow_dir / "metadata.json"
|
|
24
|
+
if not metadata_file.exists():
|
|
25
|
+
raise FileNotFoundError(f"工作流目录 {self.workflow_dir} 中没有 metadata.json 文件")
|
|
26
|
+
|
|
27
|
+
try:
|
|
28
|
+
with open(metadata_file, 'r', encoding='utf-8') as f:
|
|
29
|
+
metadata = json.load(f)
|
|
30
|
+
except json.JSONDecodeError as e:
|
|
31
|
+
raise ValueError(f"解析 metadata.json 失败: {e}")
|
|
32
|
+
except Exception as e:
|
|
33
|
+
raise IOError(f"读取 metadata.json 失败: {e}")
|
|
34
|
+
|
|
35
|
+
required_fields = ["id", "version", "entry_file", "entry_point"]
|
|
36
|
+
missing_fields = [field for field in required_fields if field not in metadata]
|
|
37
|
+
|
|
38
|
+
if missing_fields:
|
|
39
|
+
raise ValueError(
|
|
40
|
+
f"工作流目录 {self.workflow_dir} 的 metadata.json 中缺少必需字段: {', '.join(missing_fields)}"
|
|
41
|
+
)
|
|
42
|
+
|
|
43
|
+
return WorkflowInfo(
|
|
44
|
+
id=metadata["id"],
|
|
45
|
+
name=metadata.get("name", metadata["id"]),
|
|
46
|
+
description=metadata.get("description", ""),
|
|
47
|
+
version=metadata.get("version"),
|
|
48
|
+
tags=metadata.get("tags", []),
|
|
49
|
+
entry_file=metadata["entry_file"],
|
|
50
|
+
entry_point=metadata["entry_point"],
|
|
51
|
+
wf_dir=self.workflow_dir,
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
@cache
|
|
55
|
+
def load_requirements(self) -> List[str]:
|
|
56
|
+
requirements_file = self.workflow_dir / "requirements.txt"
|
|
57
|
+
if not requirements_file.exists():
|
|
58
|
+
return []
|
|
59
|
+
|
|
60
|
+
requirements: List[str] = []
|
|
61
|
+
try:
|
|
62
|
+
with open(requirements_file, 'r', encoding='utf-8') as f:
|
|
63
|
+
for line in f:
|
|
64
|
+
line = line.strip()
|
|
65
|
+
if line and not line.startswith("#"):
|
|
66
|
+
requirements.append(line)
|
|
67
|
+
except Exception as e:
|
|
68
|
+
raise IOError(f"读取 requirements.txt 失败: {e}")
|
|
69
|
+
|
|
70
|
+
return requirements
|
|
71
|
+
|
|
72
|
+
def _clear_module_cache(self, module_name: str) -> None:
|
|
73
|
+
"""清理模块及其子模块缓存(用于 force_reload)"""
|
|
74
|
+
prefix = module_name + "."
|
|
75
|
+
for key in list(sys.modules.keys()):
|
|
76
|
+
if key == module_name or key.startswith(prefix):
|
|
77
|
+
del sys.modules[key]
|
|
78
|
+
|
|
79
|
+
def _load_runnable_data(self, force_reload: bool = False) -> tuple[Callable[..., BaseModel], Callable[..., Any]]:
|
|
80
|
+
info = self.load_info()
|
|
81
|
+
|
|
82
|
+
entry_filepath = info.wf_dir / info.entry_file
|
|
83
|
+
if not entry_filepath.exists():
|
|
84
|
+
raise FileNotFoundError(
|
|
85
|
+
f"工作流 {info.id} 的入口文件不存在: {entry_filepath}"
|
|
86
|
+
)
|
|
87
|
+
|
|
88
|
+
module_name = self._sanitize_module_name(info.id, info.version)
|
|
89
|
+
|
|
90
|
+
if force_reload:
|
|
91
|
+
self._clear_module_cache(module_name)
|
|
92
|
+
module = sys.modules.get(module_name)
|
|
93
|
+
if module is None:
|
|
94
|
+
module = self._load_module(module_name, entry_filepath)
|
|
95
|
+
|
|
96
|
+
create_graph_func = self._resolve_entry_point(module, info.entry_point, info.id)
|
|
97
|
+
if not callable(create_graph_func):
|
|
98
|
+
raise TypeError(
|
|
99
|
+
f"工作流 {info.id} 的入口点 '{info.entry_point}' 不是可调用对象,"
|
|
100
|
+
f"而是 {type(create_graph_func)}"
|
|
101
|
+
)
|
|
102
|
+
|
|
103
|
+
# TODO: Support loading input schema
|
|
104
|
+
get_input_schema_func = None
|
|
105
|
+
|
|
106
|
+
return get_input_schema_func, create_graph_func
|
|
107
|
+
|
|
108
|
+
def load_workflow(self, init_args: dict = {}, force_reload: bool = False) -> BaseRunnableWorkflow:
|
|
109
|
+
"""加载工作流,返回包含 info 和 creator 的加载结果"""
|
|
110
|
+
info = self.load_info()
|
|
111
|
+
|
|
112
|
+
# now we only support StateGraph
|
|
113
|
+
get_input_schema_func, create_graph_func = self._load_runnable_data(force_reload)
|
|
114
|
+
graph = create_graph_func(init_args)
|
|
115
|
+
runnable = StateGraphRunnableWorkflow(info=info, graph=graph)
|
|
116
|
+
|
|
117
|
+
return runnable
|
|
118
|
+
|
|
119
|
+
def _sanitize_module_name(self, wf_id: str, version: str) -> str:
|
|
120
|
+
safe_version = version.replace(".", "_")
|
|
121
|
+
safe_wf_id = re.sub(r'[^0-9a-zA-Z_]', '_', wf_id)
|
|
122
|
+
return f"workflow.{safe_wf_id}.{safe_version}"
|
|
123
|
+
|
|
124
|
+
def _load_module(self, module_name: str, filepath: Path) -> Any:
|
|
125
|
+
"""加载 Python 模块,支持相对导入"""
|
|
126
|
+
try:
|
|
127
|
+
# 工作流目录(包含 workflow.py 的目录)
|
|
128
|
+
workflow_dir = filepath.parent
|
|
129
|
+
|
|
130
|
+
# 使用 spec_from_file_location 加载模块
|
|
131
|
+
# submodule_search_locations 参数使相对导入能够工作
|
|
132
|
+
spec = importlib.util.spec_from_file_location(
|
|
133
|
+
module_name,
|
|
134
|
+
filepath,
|
|
135
|
+
submodule_search_locations=[str(workflow_dir)]
|
|
136
|
+
)
|
|
137
|
+
if spec is None:
|
|
138
|
+
raise ImportError(f"无法为文件 {filepath} 创建模块规范")
|
|
139
|
+
|
|
140
|
+
if spec.loader is None:
|
|
141
|
+
raise ImportError(f"文件 {filepath} 没有加载器")
|
|
142
|
+
|
|
143
|
+
module = importlib.util.module_from_spec(spec)
|
|
144
|
+
sys.modules[module_name] = module
|
|
145
|
+
spec.loader.exec_module(module)
|
|
146
|
+
|
|
147
|
+
return module
|
|
148
|
+
except Exception as e:
|
|
149
|
+
# 如果加载失败,清理 sys.modules
|
|
150
|
+
if module_name in sys.modules:
|
|
151
|
+
del sys.modules[module_name]
|
|
152
|
+
raise ImportError(f"加载工作流模块失败: {e}") from e
|
|
153
|
+
|
|
154
|
+
def _resolve_entry_point(self, module: Any, entry_point: str, workflow_id: str) -> Any:
|
|
155
|
+
parts = entry_point.split('.')
|
|
156
|
+
obj = module
|
|
157
|
+
|
|
158
|
+
for i, part in enumerate(parts):
|
|
159
|
+
if not hasattr(obj, part):
|
|
160
|
+
path_so_far = '.'.join(parts[:i+1])
|
|
161
|
+
raise AttributeError(
|
|
162
|
+
f"工作流 {workflow_id} 的模块中没有找到入口点 '{path_so_far}'"
|
|
163
|
+
)
|
|
164
|
+
obj = getattr(obj, part)
|
|
165
|
+
|
|
166
|
+
return obj
|
|
167
|
+
|
|
168
|
+
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import zipfile
|
|
3
|
+
import shutil
|
|
4
|
+
import tempfile
|
|
5
|
+
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional, Dict, List, Tuple, Callable, Any
|
|
8
|
+
import requests
|
|
9
|
+
|
|
10
|
+
from alayaflow.workflow.workflow_info import WorkflowKey, WorkflowInfo
|
|
11
|
+
from alayaflow.workflow.workflow_loader import WorkflowLoader
|
|
12
|
+
from alayaflow.workflow.runnable import BaseRunnableWorkflow
|
|
13
|
+
from alayaflow.common.config import settings
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class WorkflowManager:
    """Manage workflows: load/cache local ones, list remote ones, and
    install/uninstall workflow packages under ``settings.workflow_storage_path``.
    """

    def __init__(self):
        # All caches are keyed by WorkflowKey(workflow_id, version).
        self._info_cache: "Dict[WorkflowKey, WorkflowInfo]" = {}
        self._workflow_cache: "Dict[WorkflowKey, BaseRunnableWorkflow]" = {}
        self._requirements_cache: "Dict[WorkflowKey, List[str]]" = {}

    def clear_cache(self):
        """Drop all cached info, runnables, and requirements."""
        self._info_cache.clear()
        self._workflow_cache.clear()
        self._requirements_cache.clear()

    def load_workflow(self, workflow_id: str, version: str, init_args: "dict | None" = None) -> None:
        """Load one workflow version from disk and populate all three caches.

        ``init_args`` defaults to ``None`` (treated as an empty dict) instead
        of a shared mutable ``{}`` default.
        """
        init_args = {} if init_args is None else init_args
        loader = WorkflowLoader(settings.workflow_storage_path, workflow_id, version)
        key = WorkflowKey(workflow_id, version)
        self._info_cache[key] = loader.load_info()
        self._workflow_cache[key] = loader.load_workflow(init_args)
        self._requirements_cache[key] = loader.load_requirements()

    def get_workflow_info(self, workflow_id: str, version: str) -> "WorkflowInfo":
        """Return (and cache) the metadata for one workflow version."""
        key = WorkflowKey(workflow_id, version)
        info = self._info_cache.get(key)
        if info is None:
            loader = WorkflowLoader(settings.workflow_storage_path, workflow_id, version)
            info = loader.load_info()
            self._info_cache[key] = info
        return info

    def get_workflow_requirements(self, workflow_id: str, version: str) -> "List[str]":
        """Return (and cache) the pip requirements for one workflow version."""
        key = WorkflowKey(workflow_id, version)
        reqs = self._requirements_cache.get(key)
        if reqs is None:
            loader = WorkflowLoader(settings.workflow_storage_path, workflow_id, version)
            reqs = loader.load_requirements()
            self._requirements_cache[key] = reqs
        return reqs

    def get_workflow_runnable(self, workflow_id: str, version: str) -> "BaseRunnableWorkflow":
        """Return the cached runnable; unlike info/requirements this is never
        built lazily, so ``load_workflow`` must have been called first."""
        key = WorkflowKey(workflow_id, version)
        runnable = self._workflow_cache.get(key)
        if runnable is None:
            raise ValueError(f"Workflow {workflow_id} version {version} is not loaded. Call load_workflow first.")
        return runnable

    def list_loaded_workflows(self) -> "List[str]":
        """Return loaded workflows as ``"<id>_<version>"`` strings."""
        return [f'{wf.id}_{wf.version}' for wf in self._workflow_cache.keys()]

    def list_loaded_workflow_ids(self) -> "List[str]":
        """Return the distinct ids of loaded workflows."""
        return list(set(wf.id for wf in self._workflow_cache.keys()))

    def list_local_workflow_info_list(self) -> "List[WorkflowInfo]":
        """Scan ``<storage>/<id>/<version>`` directories and return metadata
        for every readable workflow; unreadable ones are reported and skipped.
        Directories whose name starts with ``_`` are ignored."""
        if not settings.workflow_storage_path.exists():
            raise FileNotFoundError(f"Workflow storage path {settings.workflow_storage_path} does not exist")
        info_list = []
        for workflow_id_dir in settings.workflow_storage_path.iterdir():
            if not workflow_id_dir.is_dir() or workflow_id_dir.name.startswith("_"):
                continue
            workflow_id = workflow_id_dir.name
            for version_dir in workflow_id_dir.iterdir():
                if not version_dir.is_dir() or version_dir.name.startswith("_"):
                    continue

                version = version_dir.name
                try:
                    info = self.get_workflow_info(workflow_id, version)
                    info_list.append(info)
                except (FileNotFoundError, ValueError, IOError, ImportError, AttributeError, TypeError) as e:
                    print(f"Loading workflow {workflow_id} version {version} failed: {e}")
                    continue
        return info_list

    def list_local_workflows(self) -> "List[str]":
        """Return the distinct ids of locally installed workflows."""
        return list(set(wf.id for wf in self.list_local_workflow_info_list()))

    def list_local_workflow_versions(self, workflow_id: str) -> "List[str]":
        """Return every locally installed version of one workflow id."""
        return [wf.version for wf in self.list_local_workflow_info_list() if wf.id == workflow_id]

    def _fetch_remote_workflows(self) -> "List[Any]":
        """Fetch raw workflow entries from AlayaHub.

        Returns an empty list on any request failure (best-effort, same as
        the previous inline try/except in each caller). Tolerates the
        response being a dict (``workflows``/``items`` key) or a bare list.
        """
        try:
            list_url = f"{settings.alayahub_url}/api/workflows/list"
            params = {"page": 1, "page_size": 100}  # first 100 workflows

            response = requests.get(list_url, params=params, timeout=10)
            response.raise_for_status()

            data = response.json()
        except requests.RequestException:
            return []

        if isinstance(data, dict):
            return data.get("workflows", data.get("items", []))
        if isinstance(data, list):
            return data
        return []

    def list_remote_workflow_ids(self) -> "List[str]":
        """Return the distinct workflow ids known to the remote AlayaHub."""
        remote_workflow_ids = []
        for workflow in self._fetch_remote_workflows():
            if isinstance(workflow, dict):
                workflow_id = workflow.get("id") or workflow.get("workflow_id")
                if workflow_id:
                    remote_workflow_ids.append(workflow_id)
            elif isinstance(workflow, str):
                remote_workflow_ids.append(workflow)

        # Deduplicate before returning.
        return list(set(remote_workflow_ids))

    def list_available_workflows(self) -> "Dict[str, List[str]]":
        """Return {"local": installed workflow ids,
        "remote": not-yet-installed remote "<id>_<version>" keys}.

        Bug fixes vs. the original: it referenced the nonexistent
        ``self.platform_api_url`` (AttributeError), and compared remote
        ``"<id>_<version>"`` keys against a set of bare ids, so the
        "already installed" filter never matched anything.
        """
        local_workflows = self.list_local_workflows()
        # Installed (id, version) pairs in the same "<id>_<version>" form
        # the remote entries are keyed in.
        local_keys = {f"{info.id}_{info.version}" for info in self.list_local_workflow_info_list()}

        remote_workflows = []
        for workflow in self._fetch_remote_workflows():
            if isinstance(workflow, dict):
                workflow_id = workflow.get("id")
                workflow_version = workflow.get("version")
                workflow_key = f"{workflow_id}_{workflow_version}"
                # Only report versions not already installed locally.
                if workflow_key not in local_keys:
                    remote_workflows.append(workflow_key)

        return {
            "local": local_workflows,
            "remote": remote_workflows,
        }

    def install_workflow(self, workflow_id: str) -> bool:
        """Download a workflow zip from AlayaHub and install it under
        ``<storage>/<id>/<version>``; replaces an existing copy of the same
        version and clears all caches on success.

        Raises:
            ValueError: on download or installation failure (original cause
                is chained).
        """
        try:
            # 1. Download the workflow zip from the platform.
            download_url = f"{settings.alayahub_url}/api/workflows/{workflow_id}/download"
            response = requests.get(download_url, stream=True, timeout=30)
            response.raise_for_status()

            # 2. Stage the zip in a temporary directory.
            with tempfile.TemporaryDirectory() as temp_dir:
                zip_path = Path(temp_dir) / f"{workflow_id}.zip"

                with open(zip_path, "wb") as f:
                    for chunk in response.iter_content(chunk_size=8192):
                        f.write(chunk)

                # 3. Read metadata.json from the archive to learn the version.
                with zipfile.ZipFile(zip_path, "r") as zip_ref:
                    metadata_content = None
                    for member in zip_ref.namelist():
                        if member.endswith("metadata.json"):
                            metadata_content = zip_ref.read(member).decode("utf-8")
                            break

                    if not metadata_content:
                        raise ValueError(f"工作流 {workflow_id} 的 zip 文件中未找到 metadata.json")

                    metadata = json.loads(metadata_content)
                    version = metadata.get("version", "1.0.0")

                    # Target layout: <storage>/<workflow_id>/<version>
                    workflow_id_dir = settings.workflow_storage_path / workflow_id
                    workflow_dir = workflow_id_dir / version

                    # 4. Replace any previously installed copy of this version.
                    if workflow_dir.exists():
                        shutil.rmtree(workflow_dir)

                    # 5. Create the directory and extract.
                    workflow_dir.mkdir(parents=True, exist_ok=True)

                    # NOTE(review): extractall trusts archive member paths; a
                    # malicious zip with "../" members could escape
                    # workflow_dir (zip slip). Consider validating member
                    # names before extraction.
                    zip_ref.extractall(workflow_dir)

            # 6. Invalidate caches so the new version can be (re)loaded.
            self.clear_cache()

            return True

        except requests.RequestException as e:
            raise ValueError(f"下载工作流 {workflow_id} 失败: {e}") from e
        except Exception as e:
            raise ValueError(f"安装工作流 {workflow_id} 失败: {e}") from e

    def uninstall_workflow(self, workflow_id: str) -> bool:
        """Delete every installed version of a workflow and clear the caches.

        Raises:
            ValueError: if the workflow is not installed or removal fails
                (original cause is chained).
        """
        try:
            workflow_id_dir = settings.workflow_storage_path / workflow_id

            if not workflow_id_dir.exists() or not workflow_id_dir.is_dir():
                raise ValueError(f"工作流 {workflow_id} 未安装")

            # Remove the whole <workflow_id> directory (all versions).
            shutil.rmtree(workflow_id_dir)

            self.clear_cache()
            return True

        except Exception as e:
            raise ValueError(f"卸载工作流 {workflow_id} 失败: {e}") from e
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: alayaflow
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A desktop platform for executing LangGraph workflows with uv-managed sandboxes.
|
|
5
|
+
Author-email: alayaflow group <dev@example.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
License-File: LICENSE
|
|
8
|
+
Requires-Python: >=3.10
|
|
9
|
+
Requires-Dist: fastapi<0.116.0,>=0.100.0
|
|
10
|
+
Requires-Dist: langchain
|
|
11
|
+
Requires-Dist: langchain-community
|
|
12
|
+
Requires-Dist: langchain-openai
|
|
13
|
+
Requires-Dist: langgraph
|
|
14
|
+
Requires-Dist: pydantic-settings>=2.0.0
|
|
15
|
+
Requires-Dist: pydantic>=2.0.0
|
|
16
|
+
Requires-Dist: requests>=2.31.0
|
|
17
|
+
Requires-Dist: rich
|
|
18
|
+
Requires-Dist: typer>=0.9.0
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
|
|
21
|
+
# AlayaFlow
|
|
22
|
+
|
|
23
|
+
A desktop platform for executing LangGraph workflows with multiple executors.
|
|
24
|
+
|
|
25
|
+
## 简介
|
|
26
|
+
|
|
27
|
+
AlayaFlow 是一个桌面平台,用于执行 LangGraph 工作流。
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
## 安装
|
|
31
|
+
|
|
32
|
+
### 从 PyPI 安装(推荐)
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
pip install alayaflow
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
### 从源码安装
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
git clone git@github.com:AlayaDB-AI/AlayaFlow.git
|
|
42
|
+
cd alayaflow
|
|
43
|
+
pip install -e .
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## 系统要求
|
|
47
|
+
|
|
48
|
+
- Python >= 3.10
|
|
49
|
+
- uv (Rust-based Python package installer)
|
|
50
|
+
|
|
51
|
+
## 使用示例
|
|
52
|
+
|
|
53
|
+
```python
|
|
54
|
+
from alayaflow.workflow import WorkflowManager, WorkflowLoader
|
|
55
|
+
|
|
56
|
+
# 创建工作流管理器
|
|
57
|
+
manager = WorkflowManager()
|
|
58
|
+
|
|
59
|
+
# 加载工作流
|
|
60
|
+
manager.load_workflow(workflow_id="my-workflow", version="1.0.0")
|
|
61
|
+
|
|
62
|
+
# 执行工作流...
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
## 配置
|
|
66
|
+
|
|
67
|
+
项目支持通过 `.env` 文件进行配置。主要配置项包括:
|
|
68
|
+
|
|
69
|
+
- `dev_mode`: 开发模式(dev/dev-uneditable/prod)
|
|
70
|
+
- `alayahub_url`: AlayaHub API 地址
|
|
71
|
+
- `langfuse_enabled`: 是否启用 Langfuse 追踪
|
|
72
|
+
- 各种存储路径配置
|
|
73
|
+
|
|
74
|
+
## 开发
|
|
75
|
+
|
|
76
|
+
### 安装开发依赖
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
pip install -e ".[dev]"
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
### 运行测试
|
|
83
|
+
|
|
84
|
+
```bash
|
|
85
|
+
pytest
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
## 许可证
|
|
89
|
+
|
|
90
|
+
本项目采用 MIT License。详情请参阅 [LICENSE](LICENSE) 文件。
|
|
91
|
+
|
|
92
|
+
## 贡献
|
|
93
|
+
|
|
94
|
+
欢迎提交 Issue 和 Pull Request!
|
|
95
|
+
|
|
96
|
+
## 相关链接
|
|
97
|
+
|
|
98
|
+
- [LangGraph](https://github.com/langchain-ai/langgraph)
|
|
99
|
+
- [uv](https://github.com/astral-sh/uv)
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
alayaflow/__init__.py,sha256=Twp-G7fdsTkTspyDl9VKx9yieHHF9b8BalhEtnqNKWs,121
|
|
2
|
+
alayaflow/api/__init__.py,sha256=Y33eVzsA9v20VLpRK-v3DZOF-DsjfExag6C1taK2WRc,74
|
|
3
|
+
alayaflow/api/api_singleton.py,sha256=5ffWXL4umEOD3VNBOL0X2c38-PjOEEeQkK5yWbTI1BU,10721
|
|
4
|
+
alayaflow/clients/alayamem/base_client.py,sha256=pyU2WF2jqNEgBEe8JOZSg13gHQ2pJcBgJ_6YP-5mWkw,540
|
|
5
|
+
alayaflow/clients/alayamem/http_client.py,sha256=n0hAh_ddzEwFfNMsdw5s2dqvuMsyZNGOT-RcHsIXuEw,2171
|
|
6
|
+
alayaflow/common/config.py,sha256=pi4zH_Pi0u6Fb8ZIs4u3qFOUOUeqxxUqqksoUp-hynM,3806
|
|
7
|
+
alayaflow/component/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
8
|
+
alayaflow/component/chat_model.py,sha256=aXBRDdtp0HOHlonwfXTzOIcjepjd5zBh8znlJpK9oVU,461
|
|
9
|
+
alayaflow/component/intent_classifier.py,sha256=5KH52LIqIDpw2hlX4gi3Ff7SFVhenCFdFV-fXag0sDM,3765
|
|
10
|
+
alayaflow/component/llm_node.py,sha256=URRqSj-snRjbPk-7S7uEd8DJk3L2srYe8Qslu8T6u28,3773
|
|
11
|
+
alayaflow/component/memory.py,sha256=Xl5ABW89dswC9oMwkeS6NFAZniKFw8BuGuLAAHddRRA,1448
|
|
12
|
+
alayaflow/component/retrieve_node.py,sha256=vJqK-5KC-w9ZE_cxA1hHDTWuA-vbPrvxJkionKfniYE,652
|
|
13
|
+
alayaflow/component/web_search.py,sha256=PoUZV_H4vTDSwTqQarbmukQzpyUcBTatJxLZso-0Kok,3282
|
|
14
|
+
alayaflow/component/langflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
15
|
+
alayaflow/component/langflow/intent_classifier.py,sha256=0xqq2wpVVnEErgjOenKYHMsXMkCrHOlCpEZRJqw3PoM,2822
|
|
16
|
+
alayaflow/execution/__init__.py,sha256=9gj_xgIJxlq3EuwsDMb5X05yP70HoD0_T25Khd495Xc,117
|
|
17
|
+
alayaflow/execution/env_manager.py,sha256=fQm8RO4HwEIfbqdwA4Qc76TgrJiBj39NCaJuy7XhaAk,16672
|
|
18
|
+
alayaflow/execution/executor_manager.py,sha256=EwSlU6WnsRkhO0z2cxESwtsQnaIOjfmDazVyUob6DVA,2158
|
|
19
|
+
alayaflow/execution/langfuse_tracing.py,sha256=BuRMHDH7Gub7CMkJM5ECLzs4vjy3VqAgzh2INE9zbOI,3882
|
|
20
|
+
alayaflow/execution/workflow_runner.py,sha256=XEX4Em0Hv1sI8Im0lREjXq3fN1jYVwFnMMW3pphIAZk,3243
|
|
21
|
+
alayaflow/execution/executors/__init__.py,sha256=RYwYg880smrZ8EX5iwVsJe0Rtgo8-tF82pY5jA3926g,412
|
|
22
|
+
alayaflow/execution/executors/base_executor.py,sha256=yMP2Fw2uf6TCOj9axQtFApIZCSw94QOXuxvEpzy9UW0,257
|
|
23
|
+
alayaflow/execution/executors/naive_executor.py,sha256=lLqjsHZBbXGUO2HMvr_0hYD9tgUKMWep7v0QBQUF8Lk,4892
|
|
24
|
+
alayaflow/execution/executors/uv_executor.py,sha256=XCqECDdieBlZ36CcMSGVyml11Oi9p4HnJLhyapBDlfQ,4630
|
|
25
|
+
alayaflow/execution/executors/worker_executor.py,sha256=o_O8RjguTifGye4avuBkhKGZcrB_xAAbVuMNve8isfY,521
|
|
26
|
+
alayaflow/utils/singleton.py,sha256=5crFVfOkr9pU_j83ywqAMaL07BvVN5Ke_VGjT9qyUN0,432
|
|
27
|
+
alayaflow/workflow/__init__.py,sha256=9IqNPAtWt7DzASmxg48iTRu-STymvUBd8_7-JsR2pgk,250
|
|
28
|
+
alayaflow/workflow/workflow_info.py,sha256=rnpAwYE4trhiv7o8LPmQpyQ3CDFfNN2yk1CLKRnWz0w,1259
|
|
29
|
+
alayaflow/workflow/workflow_loader.py,sha256=fJi7i714JRY5bESahLxcTNei_f-YEL5fnZtgHG7ChG4,6623
|
|
30
|
+
alayaflow/workflow/workflow_manager.py,sha256=bfPGP1UWom4B2ZfuWyyKI0tfFxyn4j2_wBMECc8-Fu8,11536
|
|
31
|
+
alayaflow/workflow/runnable/__init__.py,sha256=sNybFeRxLwbDLHiZxlVFXsn3w2n1Jn0Mtun2W6fvjFU,257
|
|
32
|
+
alayaflow/workflow/runnable/base_runnable_workflow.py,sha256=gN8d2pUijugu1JZr3RrHS95ziu8Of401IQQtmTM6_lc,655
|
|
33
|
+
alayaflow/workflow/runnable/state_graph_runnable_workflow.py,sha256=K2ahaGN9ubr9G2wDeBTQR_0sYo03SWhQbGVUjIb7w0U,843
|
|
34
|
+
alayaflow-0.1.0.dist-info/METADATA,sha256=ribiSb6KxPEN_sDGa9Yd4v8w21jFzKegHvkD0448zsw,1925
|
|
35
|
+
alayaflow-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
36
|
+
alayaflow-0.1.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
|
|
37
|
+
alayaflow-0.1.0.dist-info/RECORD,,
|