alayaflow-0.1.0-py3-none-any.whl

Files changed (37)
  1. alayaflow/__init__.py +5 -0
  2. alayaflow/api/__init__.py +5 -0
  3. alayaflow/api/api_singleton.py +81 -0
  4. alayaflow/clients/alayamem/base_client.py +19 -0
  5. alayaflow/clients/alayamem/http_client.py +64 -0
  6. alayaflow/common/config.py +106 -0
  7. alayaflow/component/__init__.py +0 -0
  8. alayaflow/component/chat_model.py +20 -0
  9. alayaflow/component/intent_classifier.py +94 -0
  10. alayaflow/component/langflow/__init__.py +0 -0
  11. alayaflow/component/langflow/intent_classifier.py +83 -0
  12. alayaflow/component/llm_node.py +123 -0
  13. alayaflow/component/memory.py +50 -0
  14. alayaflow/component/retrieve_node.py +17 -0
  15. alayaflow/component/web_search.py +126 -0
  16. alayaflow/execution/__init__.py +6 -0
  17. alayaflow/execution/env_manager.py +424 -0
  18. alayaflow/execution/executor_manager.py +59 -0
  19. alayaflow/execution/executors/__init__.py +9 -0
  20. alayaflow/execution/executors/base_executor.py +9 -0
  21. alayaflow/execution/executors/naive_executor.py +121 -0
  22. alayaflow/execution/executors/uv_executor.py +125 -0
  23. alayaflow/execution/executors/worker_executor.py +12 -0
  24. alayaflow/execution/langfuse_tracing.py +104 -0
  25. alayaflow/execution/workflow_runner.py +98 -0
  26. alayaflow/utils/singleton.py +14 -0
  27. alayaflow/workflow/__init__.py +6 -0
  28. alayaflow/workflow/runnable/__init__.py +7 -0
  29. alayaflow/workflow/runnable/base_runnable_workflow.py +19 -0
  30. alayaflow/workflow/runnable/state_graph_runnable_workflow.py +23 -0
  31. alayaflow/workflow/workflow_info.py +50 -0
  32. alayaflow/workflow/workflow_loader.py +168 -0
  33. alayaflow/workflow/workflow_manager.py +257 -0
  34. alayaflow-0.1.0.dist-info/METADATA +99 -0
  35. alayaflow-0.1.0.dist-info/RECORD +37 -0
  36. alayaflow-0.1.0.dist-info/WHEEL +4 -0
  37. alayaflow-0.1.0.dist-info/licenses/LICENSE +661 -0
@@ -0,0 +1,121 @@
+ import asyncio
+ import traceback
+ import queue
+ import threading
+ from typing import Generator, Dict, Any
+
+ from alayaflow.execution.executors.base_executor import BaseExecutor
+ from alayaflow.workflow.workflow_manager import WorkflowManager
+ from alayaflow.workflow.runnable import BaseRunnableWorkflow, StateGraphRunnableWorkflow
+ from alayaflow.common.config import settings
+ from alayaflow.execution.langfuse_tracing import get_tracing
+
+ _SENTINEL = object()
+
+ class NaiveExecutor(BaseExecutor):
+     def __init__(self, workflow_manager: WorkflowManager):
+         self.workflow_manager = workflow_manager
+
+     def _to_jsonable(self, obj: Any) -> Any:
+         """Recursively convert an object to a JSON-serializable format."""
+         if obj is None or isinstance(obj, (bool, int, float, str)):
+             return obj
+         if isinstance(obj, dict):
+             return {k: self._to_jsonable(v) for k, v in obj.items()}
+         if isinstance(obj, (list, tuple)):
+             return [self._to_jsonable(item) for item in obj]
+         if hasattr(obj, 'model_dump') and callable(obj.model_dump):
+             return obj.model_dump()
+         return str(obj)
+
+     def _serialize_event(self, event: Dict) -> Dict:
+         # Convert the event to a JSON-serializable format
+         return self._to_jsonable(event)
+
+     def execute_stream(
+         self,
+         workflow_id: str,
+         version: str,
+         input_data: dict,
+         user_config: dict
+     ) -> Generator[Dict, None, None]:
+
+         # 1) resolve the workflow
+         try:
+             runnable = self.workflow_manager.get_workflow_runnable(workflow_id, version)
+         except ValueError as e:
+             yield {"error": str(e), "workflow_id": workflow_id, "version": version}
+             return
+
+         print(f"NaiveExecutor execute_stream: {workflow_id} {version} {input_data} {user_config}")
+
+         # 2) validate the runnable type
+         # TODO: Support langflow workflows.
+         # Only StateGraphRunnableWorkflow is supported for now: _produce_events_to_queue
+         # relies on the "configurable" key (StateGraph only) for Langfuse tracing.
+         if not isinstance(runnable, StateGraphRunnableWorkflow):
+             raise ValueError(f"NaiveExecutor only supports StateGraphRunnableWorkflow, but got {type(runnable)}")
+
+         # 3) async -> sync bridge
+         event_queue = queue.Queue(maxsize=1000)
+
+         def run_async_producer():
+             try:
+                 asyncio.run(self._produce_events_to_queue(runnable, input_data, user_config, event_queue))
+             except Exception as e:
+                 event_queue.put({"error": str(e), "traceback": traceback.format_exc(), "workflow_id": workflow_id, "version": version})
+             finally:
+                 event_queue.put(_SENTINEL)
+
+         producer_thread = threading.Thread(target=run_async_producer, daemon=True)
+         producer_thread.start()
+
+         # 4) stream events to the caller
+         while True:
+             try:
+                 item = event_queue.get(timeout=1.0)
+             except queue.Empty:
+                 if not producer_thread.is_alive():
+                     # Drain any remaining events before deciding the producer died
+                     saw_sentinel = False
+                     while True:
+                         try:
+                             item = event_queue.get_nowait()
+                             if item is _SENTINEL:
+                                 saw_sentinel = True
+                                 break
+                             yield self._serialize_event(item)
+                         except queue.Empty:
+                             break
+                     if saw_sentinel:
+                         break
+                     yield {"error": "producer thread exited unexpectedly", "workflow_id": workflow_id, "version": version}
+                     break
+                 continue
+             if item is _SENTINEL:
+                 break
+             yield self._serialize_event(item)
+
+
+     async def _produce_events_to_queue(self, runnable: BaseRunnableWorkflow, input_data: dict, user_config: dict, event_queue: queue.Queue):
+         try:
+             # Set up tracing
+             tracing = get_tracing(settings)
+             langfuse_cb = tracing.build_callback()
+
+             # Merge user_config with the tracing config
+             merged_config = {
+                 "configurable": user_config
+             }
+             if langfuse_cb:
+                 merged_config.update(tracing.build_config(input_data, runnable.info, langfuse_cb))
+
+             async for chunk in runnable.astream_events(input_data, merged_config):
+                 event_queue.put(chunk)  # Put each event immediately (real-time)
+         except Exception as e:
+             # If execution fails, put an error event in the queue
+             event_queue.put({
+                 "error": str(e),
+                 "traceback": traceback.format_exc(),
+                 "workflow_id": runnable.info.id,
+                 "version": runnable.info.version
+             })
+
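For orientation, a minimal sketch of how a caller might consume NaiveExecutor.execute_stream. The direct WorkflowManager() construction, the workflow ID, and the input shape below are assumptions for illustration, not part of the package:

    # Hypothetical usage sketch; assumes WorkflowManager() can be constructed
    # directly and that a workflow "my_workflow" version "1.0" is registered.
    from alayaflow.workflow.workflow_manager import WorkflowManager
    from alayaflow.execution.executors.naive_executor import NaiveExecutor

    executor = NaiveExecutor(WorkflowManager())
    for event in executor.execute_stream(
        workflow_id="my_workflow",
        version="1.0",
        input_data={"messages": []},
        user_config={},
    ):
        if "error" in event:
            print("failed:", event["error"])
            break
        print(event)  # already JSON-serializable via _serialize_event

Because the async producer runs on a daemon thread and events pass through a bounded queue, the caller sees events as they are produced rather than after the run completes.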
@@ -0,0 +1,125 @@
+ import socket
+ import os
+ import struct
+ from typing import Generator, Dict, Optional
+ from pathlib import Path
+
+ from alayaflow.execution.executors.base_executor import BaseExecutor
+ from alayaflow.execution.env_manager import EnvManager
+ from alayaflow.workflow.workflow_manager import WorkflowManager
+ from alayaflow.common.config import settings
+
+
+ class UvExecutor(BaseExecutor):
+     def __init__(
+         self,
+         workflow_manager: WorkflowManager,
+         env_manager: Optional[EnvManager] = None
+     ):
+         self.workflow_manager = workflow_manager
+         self.env_mgr = env_manager if env_manager is not None else EnvManager()
+
+     def _get_free_port(self):
+         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+             s.bind(('', 0))
+             return s.getsockname()[1]
+
+     def execute_stream(
+         self,
+         workflow_id: str,
+         version: str,
+         input_data: dict,
+         user_config: dict,
+     ) -> Generator[Dict, None, None]:
+         raise NotImplementedError("uv executor not supported yet")
+
+         # # 1. Resolve the workflow directory and version
+         # workflow_info = self.workflow_manager.get_workflow_info(workflow_id, version)
+         # if not workflow_info:
+         #     yield {"error": f"Workflow {workflow_id}_{version} does not exist"}
+         #     return
+
+         # wf_dir = str(workflow_info.wf_dir)
+
+         # # 2. Read the workflow's dependencies
+         # requirements = self.workflow_manager.get_workflow_requirements(workflow_id, version)
+
+         # # 3. Prepare the environment with uv (Python version managed automatically)
+         # python_exe = self.env_mgr.ensure_env(workflow_id, version, requirements, use_venv=True)
+
+         # # 4. Locate the socket runner script
+         # if settings.is_dev():
+         #     # dev mode: resolve from the project root
+         #     runner_script_path = os.path.join(settings.alayaflow_root, 'src/alayaflow/execution/workflow_runner.py')
+         # else:
+         #     # dev-uneditable/prod mode: resolve from the installed package
+         #     runner_script_path = str(Path(__file__).parent.parent / 'workflow_runner.py')
+
+         # # 5. Start the server socket
+         # server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+         # port = self._get_free_port()
+         # server_socket.bind(('127.0.0.1', port))
+         # server_socket.listen(1)
+
+         # # 6. Start the subprocess
+         # import subprocess
+         # import json
+         # process = subprocess.Popen(
+         #     [python_exe, runner_script_path, str(port), wf_dir],
+         #     stdin=subprocess.PIPE,
+         #     stdout=subprocess.PIPE,
+         #     stderr=subprocess.PIPE,
+         #     text=True
+         # )
+
+         # process.stdin.write(json.dumps(init_args))
+         # process.stdin.write('\n')
+         # process.stdin.write(json.dumps(config))
+         # process.stdin.write('\n')
+         # process.stdin.write(json.dumps(input_data))
+         # process.stdin.write('\n')
+         # process.stdin.close()
+
+         # # 7. Wait for the runner to connect
+         # server_socket.settimeout(10.0)  # the first run may be slow while uv downloads Python, so allow extra time
+         # conn = None
+         # try:
+         #     conn, addr = server_socket.accept()
+
+         #     # 8. Read the event stream
+         #     while True:
+         #         header = conn.recv(4)
+         #         if not header:
+         #             break
+         #         msg_len = struct.unpack('!I', header)[0]
+
+         #         chunks = []
+         #         bytes_recd = 0
+         #         while bytes_recd < msg_len:
+         #             chunk = conn.recv(min(msg_len - bytes_recd, 4096))
+         #             if not chunk:
+         #                 raise RuntimeError("Socket connection broken")
+         #             chunks.append(chunk)
+         #             bytes_recd += len(chunk)
+
+         #         full_msg = b''.join(chunks)
+         #         full_msg_json = json.loads(full_msg.decode('utf-8'))
+         #         yield full_msg_json
+
+         # except socket.timeout:
+         #     yield {"error": "Runner connection timeout (Maybe UV is downloading python?)"}
+         # except Exception as e:
+         #     yield {"error": f"Socket Error: {e}"}
+         # finally:
+         #     if conn:
+         #         conn.close()
+         #     server_socket.close()
+
+         # logs = process.stdout.read()
+         # if logs:
+         #     print(f"--- [Clean Runner Logs] ---\n{logs}---------------------------")
+
+         # stderr = process.stderr.read()
+         # if stderr:
+         #     print(f"[Stderr] {stderr}")
+
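The commented-out runner protocol above frames every JSON message with a 4-byte big-endian length header (struct.pack('!I', ...)). A self-contained sketch of both sides of that framing, with illustrative names (send_msg, recv_msg) that are not part of alayaflow:

    import json
    import socket
    import struct

    def send_msg(sock: socket.socket, payload: dict) -> None:
        body = json.dumps(payload).encode('utf-8')
        # 4-byte big-endian unsigned length, then the JSON body
        sock.sendall(struct.pack('!I', len(body)) + body)

    def recv_exact(sock: socket.socket, n: int) -> bytes:
        chunks = []
        while n > 0:
            chunk = sock.recv(min(n, 4096))
            if not chunk:
                raise RuntimeError("Socket connection broken")
            chunks.append(chunk)
            n -= len(chunk)
        return b''.join(chunks)

    def recv_msg(sock: socket.socket) -> dict:
        (length,) = struct.unpack('!I', recv_exact(sock, 4))
        return json.loads(recv_exact(sock, length).decode('utf-8'))

The length prefix is what lets the parent pull complete JSON documents off a byte stream without needing a delimiter.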
@@ -0,0 +1,12 @@
+ from typing import Generator, Dict
+
+ from alayaflow.execution.executors.base_executor import BaseExecutor
+ from alayaflow.workflow.workflow_manager import WorkflowManager
+
+ class WorkerExecutor(BaseExecutor):
+     def __init__(self, workflow_manager: WorkflowManager):
+         self.workflow_manager = workflow_manager
+
+     def execute_stream(self, workflow_id: str, version: str, input_data: dict, user_config: dict) -> Generator[Dict, None, None]:
+         raise NotImplementedError("worker executor not supported yet")
+
@@ -0,0 +1,104 @@
+ from typing import Any, Optional
+
+ from alayaflow.workflow.workflow_info import WorkflowInfo
+
+
+ class LangfuseTracing:
+     def __init__(self, settings: Any) -> None:
+         self._settings = settings
+         self._client_inited = False
+
+     def _normalize_value(self, value: Optional[str]) -> Optional[str]:
+         return value.strip() if isinstance(value, str) and value.strip() else None
+
+     def _merge_tags(self, wf_tags: Optional[list], input_tags: Optional[list]) -> list:
+         tags = []
+         if isinstance(wf_tags, list):
+             tags.extend(wf_tags)
+         if isinstance(input_tags, list):
+             tags.extend(input_tags)
+         if tags:
+             # Deduplicate while preserving order
+             seen = set()
+             tags = [t for t in tags if not (t in seen or seen.add(t))]
+         return tags
+
+     def _init_langfuse_client(self, public_key: str, secret_key: str, host: Optional[str]) -> None:
+         if self._client_inited:
+             return
+         from langfuse import Langfuse
+         if host is None:
+             Langfuse(public_key=public_key, secret_key=secret_key)
+         else:
+             Langfuse(public_key=public_key, secret_key=secret_key, host=host)
+         self._client_inited = True
+
+     def _load_langfuse_handler_class(self) -> Optional[Any]:
+         try:
+             from langfuse.langchain import CallbackHandler
+             return CallbackHandler
+         except Exception:
+             try:
+                 from langfuse.callback import CallbackHandler
+                 return CallbackHandler
+             except Exception:
+                 return None
+
+     def build_callback(self) -> Optional[Any]:
+         if not self._settings.langfuse_enabled:
+             return None
+
+         public_key = self._normalize_value(self._settings.langfuse_public_key)
+         secret_key = self._normalize_value(self._settings.langfuse_secret_key)
+         host = self._normalize_value(self._settings.langfuse_url)
+         if not (public_key and secret_key):
+             return None
+
+         handler_cls = self._load_langfuse_handler_class()
+         if handler_cls is None:
+             print("[Langfuse] SDK not installed; skipping tracing. Add langfuse to the workflow's requirements.txt.")
+             return None
+
+         try:
+             self._init_langfuse_client(public_key, secret_key, host)
+             return handler_cls()
+         except Exception as e:
+             print(f"[Langfuse] Initialization failed; skipping tracing: {e}")
+             return None
+
+     def build_config(self, input_data: dict, wf_info: Optional[WorkflowInfo], cb: Any) -> dict:
+         metadata = {
+             "workflow_id": getattr(wf_info, "id", None) if wf_info is not None else None,
+             "workflow_version": getattr(wf_info, "version", None) if wf_info is not None else None,
+             "workflow_name": getattr(wf_info, "name", None) if wf_info is not None else None,
+         }
+         input_metadata = input_data.get("metadata")
+         if isinstance(input_metadata, dict):
+             metadata.update(input_metadata)
+
+         wf_tags = getattr(wf_info, "tags", None) if wf_info is not None else None
+         tags = self._merge_tags(wf_tags, input_data.get("tags"))
+
+         metadata.update({
+             "langfuse_user_id": input_data.get("user_id"),
+             "langfuse_session_id": input_data.get("session_id"),
+             "langfuse_tags": tags or None,
+         })
+         metadata = {k: v for k, v in metadata.items() if v is not None}
+
+         return {
+             "callbacks": [cb],
+             "run_name": getattr(wf_info, "name", None) or getattr(wf_info, "id", None),
+             "metadata": metadata,
+         }
+
+
+ _GLOBAL_TRACING: Optional[LangfuseTracing] = None
+
+
+ def get_tracing(settings: Any) -> LangfuseTracing:
+     global _GLOBAL_TRACING
+     if _GLOBAL_TRACING is None:
+         _GLOBAL_TRACING = LangfuseTracing(settings)
+     return _GLOBAL_TRACING
+
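A sketch of how LangfuseTracing is meant to be driven; this mirrors NaiveExecutor._produce_events_to_queue, and the input_data values plus the runnable variable are assumptions for illustration:

    from alayaflow.common.config import settings
    from alayaflow.execution.langfuse_tracing import get_tracing

    tracing = get_tracing(settings)
    cb = tracing.build_callback()  # None when tracing is disabled or misconfigured

    config = {"configurable": {}}
    if cb:
        input_data = {"user_id": "u-1", "session_id": "s-1", "tags": ["demo"]}  # hypothetical
        config.update(tracing.build_config(input_data, runnable.info, cb))  # `runnable` assumed in scope
    # config now carries callbacks, run_name, and metadata for the graph run

Note that build_callback degrades gracefully: a missing SDK, missing keys, or a failed client init all return None rather than raising.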
@@ -0,0 +1,98 @@
+ import sys
+ import json
+ import socket
+ import struct
+ import traceback
+ import asyncio
+ from pathlib import Path
+
+ # Fail fast if the LangChain serialization dependencies are missing from this environment
+ try:
+     from langchain_core.messages import BaseMessage
+     from pydantic import BaseModel
+ except ImportError as e:
+     raise ImportError("Install langchain_core and pydantic to enable LangChain/LangGraph serialization") from e
+
+ try:
+     import alayaflow
+ except ImportError as e:
+     raise ImportError("alayaflow is not installed in the worker environment") from e
+
+ from alayaflow.workflow.workflow_loader import WorkflowLoader
+ from alayaflow.common.config import settings
+ from alayaflow.execution.langfuse_tracing import get_tracing
+
+ port = int(sys.argv[1])
+ wf_dir = sys.argv[2]
+
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+     s.connect(('127.0.0.1', port))
+ except ConnectionRefusedError:
+     sys.exit(1)
+
+ def default_encoder(obj):
+     if hasattr(obj, 'model_dump') and callable(obj.model_dump):
+         return obj.model_dump()
+     return str(obj)
+
+ def send_chunk(data_dict):
+     try:
+         json_bytes = json.dumps(data_dict, default=default_encoder).encode('utf-8')
+         header = struct.pack('!I', len(json_bytes))
+         s.sendall(header + json_bytes)
+     except Exception as e:
+         err_msg = json.dumps({"error": f"Serialization Error: {str(e)}"}).encode('utf-8')
+         s.sendall(struct.pack('!I', len(err_msg)) + err_msg)
+
+ async def run():
+     try:
+         # Read the three newline-delimited JSON documents from stdin
+         input_str = sys.stdin.read()
+         lines = input_str.strip().split('\n')
+         init_args = json.loads(lines[0]) if len(lines) > 0 and lines[0].strip() else {}
+         config = json.loads(lines[1]) if len(lines) > 1 and lines[1].strip() else {}
+         input_data = json.loads(lines[2]) if len(lines) > 2 and lines[2].strip() else {}
+
+         print("Input data for workflow:", input_data)
+
+         # Load the workflow metadata and creator.
+         # wf_dir layout: {workflows_dir}/{workflow_id}/{version}
+         wf_dir_path = Path(wf_dir)
+         workflows_dir = str(wf_dir_path.parent.parent)  # workflows root directory
+         workflow_id = wf_dir_path.parent.name  # workflow ID
+         version = wf_dir_path.name  # version
+
+         loader = WorkflowLoader(workflows_dir, workflow_id, version)
+         result = loader.load_workflow()
+         wf_info = result.info
+         creator = result.creator
+         graph = creator(init_args)
+
+         tracing = get_tracing(settings)
+         langfuse_cb = tracing.build_callback()
+
+         # Merge the user-provided config with the tracing config
+         merged_config = dict(config) if config else {}
+         if langfuse_cb:
+             merged_config.update(tracing.build_config(input_data, wf_info, langfuse_cb))
+
+         async for chunk in graph.astream_events(input_data, config=merged_config, version="v2"):
+             send_chunk(chunk)
+
+     except Exception as e:
+         error_info = {
+             "error": str(e),
+             "traceback": traceback.format_exc()
+         }
+         send_chunk(error_info)
+         traceback.print_exc()
+         print(f"[Runner Critical] {error_info}", file=sys.stderr)
+
+     finally:
+         s.close()
+
+ asyncio.run(run())
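The runner reads three newline-delimited JSON documents (init_args, config, input_data) from stdin before streaming events back over the socket. A parent-side sketch of that handshake, matching the commented-out UvExecutor code; python_exe, runner_script_path, port, and wf_dir are assumed to be prepared as shown there:

    import json
    import subprocess

    process = subprocess.Popen(
        [python_exe, runner_script_path, str(port), wf_dir],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        text=True,
    )
    # init_args, config, input_data: one JSON document per line, in that order
    for doc in ({}, {"configurable": {}}, {"messages": []}):  # illustrative payloads
        process.stdin.write(json.dumps(doc))
        process.stdin.write('\n')
    process.stdin.close()  # EOF matters: the runner blocks on sys.stdin.read()

Closing stdin is required because the runner consumes stdin with a single blocking read() and only proceeds once it sees EOF.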
@@ -0,0 +1,14 @@
+ import threading
+
+ class SingletonMeta(type):
+     _instances = {}
+     _lock = threading.RLock()
+
+     def __call__(cls, *args, **kwargs):
+         # Double-checked locking: test outside the lock, then again inside it
+         if cls not in cls._instances:
+             with cls._lock:
+                 if cls not in cls._instances:
+                     instance = super().__call__(*args, **kwargs)
+                     cls._instances[cls] = instance
+         return cls._instances[cls]
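A short demonstration of the metaclass (not part of the package): every construction of a class that uses SingletonMeta returns the same instance, and the RLock keeps concurrent first constructions safe:

    class AppConfig(metaclass=SingletonMeta):  # hypothetical class for illustration
        def __init__(self):
            self.values = {}

    a = AppConfig()
    b = AppConfig()
    assert a is b  # both names refer to the single shared instance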
@@ -0,0 +1,6 @@
+ from alayaflow.workflow.workflow_info import WorkflowInfo
+ from alayaflow.workflow.workflow_loader import WorkflowLoader
+ from alayaflow.workflow.workflow_manager import WorkflowManager
+
+ __all__ = ["WorkflowInfo", "WorkflowLoader", "WorkflowManager"]
+
@@ -0,0 +1,7 @@
+ from alayaflow.workflow.runnable.base_runnable_workflow import BaseRunnableWorkflow
+ from alayaflow.workflow.runnable.state_graph_runnable_workflow import StateGraphRunnableWorkflow
+
+ __all__ = [
+     "BaseRunnableWorkflow",
+     "StateGraphRunnableWorkflow"
+ ]
@@ -0,0 +1,19 @@
+ from abc import ABC
+ from typing import AsyncIterator, Dict
+
+ from alayaflow.workflow.workflow_info import WorkflowInfo
+
+ class BaseRunnableWorkflow(ABC):
+     def __init__(self, info: WorkflowInfo) -> None:
+         self._info = info
+
+     @property
+     def info(self) -> WorkflowInfo:
+         return self._info
+
+     def invoke(self, input_data: dict, user_config: dict) -> dict:
+         raise NotImplementedError("invoke method must be implemented in derived classes")
+
+     def astream_events(self, input_data: dict, config: dict) -> AsyncIterator[Dict]:
+         raise NotImplementedError("astream_events method must be implemented in derived classes")
+
@@ -0,0 +1,23 @@
+ from typing import AsyncIterator, Dict
+
+ from langgraph.graph.state import CompiledStateGraph
+
+ from alayaflow.workflow.runnable.base_runnable_workflow import BaseRunnableWorkflow
+ from alayaflow.workflow.workflow_info import WorkflowInfo
+
+ class StateGraphRunnableWorkflow(BaseRunnableWorkflow):
+     def __init__(self, info: WorkflowInfo, graph: CompiledStateGraph) -> None:
+         super().__init__(info)
+         self._graph = graph
+
+     def invoke(self, input_data: dict, user_config: dict) -> dict:
+         return self._graph.invoke(input_data, {
+             "configurable": user_config
+         })
+
+     def astream_events(self, input_data: dict, config: dict) -> AsyncIterator[Dict]:
+         # Callers (e.g. NaiveExecutor) build the full runnable config, including
+         # the "configurable" key, so pass it through without re-wrapping it.
+         return self._graph.astream_events(input_data, config, version="v2")
+
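A minimal consumption sketch, assuming workflow is a StateGraphRunnableWorkflow and that the config dict is already fully built (cf. NaiveExecutor above):

    import asyncio

    async def main(workflow):
        async for event in workflow.astream_events(
            {"messages": []},                   # hypothetical graph input
            {"configurable": {"user": "u-1"}},  # full runnable config, passed through
        ):
            print(event["event"], event.get("name"))

    # asyncio.run(main(workflow))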
@@ -0,0 +1,50 @@
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import List, NamedTuple
+
+
+ class WorkflowKey(NamedTuple):
+     id: str
+     version: str
+
+
+ @dataclass
+ class WorkflowInfo:
+     """Workflow metadata."""
+     id: str
+     name: str
+     description: str
+     version: str
+     tags: List[str]
+     entry_file: str
+     entry_point: str
+     wf_dir: Path
+
+     @property
+     def key(self) -> WorkflowKey:
+         return WorkflowKey(self.id, self.version)
+
+     def to_dict(self) -> dict:
+         return {
+             "id": self.id,
+             "name": self.name,
+             "description": self.description,
+             "version": self.version,
+             "tags": self.tags,
+             "entry_file": self.entry_file,
+             "entry_point": self.entry_point,
+             "path": str(self.wf_dir),
+         }
+
+     @classmethod
+     def from_dict(cls, data: dict) -> "WorkflowInfo":
+         return cls(
+             id=data["id"],
+             name=data["name"],
+             description=data["description"],
+             version=data["version"],
+             tags=data["tags"],
+             entry_file=data["entry_file"],
+             entry_point=data["entry_point"],
+             wf_dir=Path(data.get("path", data.get("wf_dir", ""))),
+         )
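A round-trip sketch for WorkflowInfo (all field values below are illustrative):

    from pathlib import Path

    info = WorkflowInfo(
        id="demo", name="Demo", description="example", version="1.0",
        tags=["test"], entry_file="main.py", entry_point="create_workflow",
        wf_dir=Path("/workflows/demo/1.0"),
    )
    data = info.to_dict()              # serializes wf_dir under the "path" key
    restored = WorkflowInfo.from_dict(data)
    assert restored.key == info.key == WorkflowKey("demo", "1.0")

from_dict accepts either "path" or a legacy "wf_dir" key, which keeps older serialized records loadable.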