turbo-agent-runtime 0.1.0.dev1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. turbo_agent_runtime-0.1.0.dev1/PKG-INFO +25 -0
  2. turbo_agent_runtime-0.1.0.dev1/README.md +3 -0
  3. turbo_agent_runtime-0.1.0.dev1/pyproject.toml +39 -0
  4. turbo_agent_runtime-0.1.0.dev1/setup.cfg +4 -0
  5. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/__init__.py +14 -0
  6. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/api.py +253 -0
  7. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/base.py +9 -0
  8. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/context.py +3 -0
  9. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/agent.py +839 -0
  10. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/character.py +148 -0
  11. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/llm.py +932 -0
  12. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/__init__.py +43 -0
  13. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/agent_tool.py +183 -0
  14. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/api_tool.py +550 -0
  15. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/base.py +169 -0
  16. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/builtin_tool.py +375 -0
  17. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/frontend_tool.py +503 -0
  18. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/llm_tool.py +525 -0
  19. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/executors/tool/mcp_tool.py +597 -0
  20. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/basic.py +7 -0
  21. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/basic_fncall_prompt.py +32 -0
  22. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/chat_model.py +792 -0
  23. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/chat_node.py +339 -0
  24. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/langgraph_agent.py +240 -0
  25. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/nous_fncall_prompt.py +155 -0
  26. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/react_fncall_prompt.py +216 -0
  27. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/react_template.py +291 -0
  28. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/schemas.py +55 -0
  29. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/thinking_hijack_template.py +224 -0
  30. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/tool_call_template.py +355 -0
  31. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/llm_prompt/utils.py +101 -0
  32. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/tool/buildin_tools.py +257 -0
  33. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/tool/rag_chunck.py +113 -0
  34. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/tool/tool_call.py +464 -0
  35. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/tool/tool_template.py +290 -0
  36. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/executor_identity.py +19 -0
  37. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/file_materializer.py +103 -0
  38. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/message_converter.py +244 -0
  39. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/patch.py +115 -0
  40. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/text2img.py +37 -0
  41. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/tool_convert.py +187 -0
  42. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime/utils/tool_template_db_flush.py +562 -0
  43. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime.egg-info/PKG-INFO +25 -0
  44. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime.egg-info/SOURCES.txt +45 -0
  45. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime.egg-info/dependency_links.txt +1 -0
  46. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime.egg-info/requires.txt +15 -0
  47. turbo_agent_runtime-0.1.0.dev1/src/turbo_agent_runtime.egg-info/top_level.txt +1 -0
@@ -0,0 +1,25 @@
1
+ Metadata-Version: 2.4
2
+ Name: turbo-agent-runtime
3
+ Version: 0.1.0.dev1
4
+ Summary: Runtime orchestration for turbo-agent
5
+ Author-email: sherlet <wuzeilmt@gmail.com>
6
+ Requires-Python: <3.14,>=3.10
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: turbo-agent-core>=0.1.0
9
+ Requires-Dist: pydantic>=2.7.0
10
+ Requires-Dist: loguru>=0.7.2
11
+ Requires-Dist: langchain-core>=0.2.34
12
+ Requires-Dist: langgraph>=0.2.34
13
+ Requires-Dist: langchain-openai>=0.1.20
14
+ Requires-Dist: httpx>=0.27.0
15
+ Requires-Dist: typing-extensions>=4.9.0
16
+ Requires-Dist: requests-oauthlib>=2.0.0
17
+ Requires-Dist: turbo-agent-auth>=1.0.0
18
+ Requires-Dist: json5>=0.13.0
19
+ Provides-Extra: dev
20
+ Requires-Dist: pytest<9.0.0,>=8.3.5; extra == "dev"
21
+ Requires-Dist: pytest-asyncio<0.27.0,>=0.26.0; extra == "dev"
22
+
23
+ # turbo-agent-runtime
24
+
25
+ Runtime orchestration for turbo-agent.
@@ -0,0 +1,3 @@
1
+ # turbo-agent-runtime
2
+
3
+ Runtime orchestration for turbo-agent.
@@ -0,0 +1,39 @@
1
+ [project]
2
+ name = "turbo-agent-runtime"
3
+ version = "0.1.0.dev1"
4
+ description = "Runtime orchestration for turbo-agent"
5
+ readme = "README.md"
6
+ authors = [ { name = "sherlet", email = "wuzeilmt@gmail.com" } ]
7
+ requires-python = ">=3.10,<3.14"
8
+ dependencies = [
9
+ "turbo-agent-core>=0.1.0",
10
+ "pydantic>=2.7.0",
11
+ "loguru>=0.7.2",
12
+ "langchain-core>=0.2.34",
13
+ "langgraph>=0.2.34",
14
+ "langchain-openai>=0.1.20",
15
+ "httpx>=0.27.0",
16
+ "typing-extensions>=4.9.0",
17
+ "requests-oauthlib>=2.0.0",
18
+ "turbo-agent-auth>=1.0.0",
19
+ "json5>=0.13.0",
20
+ ]
21
+
22
+ [project.optional-dependencies]
23
+ dev = [
24
+ "pytest>=8.3.5,<9.0.0",
25
+ "pytest-asyncio>=0.26.0,<0.27.0",
26
+ ]
27
+
28
+ [tool.uv]
29
+ package = true
30
+
31
+ [tool.uv.sources]
32
+ turbo_agent_runtime = { path = "src/turbo_agent_runtime" }
33
+ turbo-agent-auth = { workspace = true }
34
+
35
+ [dependency-groups]
36
+ dev = [
37
+ "pytest>=8.3.5,<9.0.0",
38
+ "pytest-asyncio>=0.26.0,<0.27.0",
39
+ ]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,14 @@
1
"""turbo-agent runtime: orchestration and execution utilities.

Importing this package patches `TurboEntity.run` / `TurboEntity.stream_run` to
dispatch to runtime executors based on `run_type`.
"""

from .utils.patch import patch_turbo_entity_methods

# Apply the patch at import time so that merely importing this package makes
# TurboEntity dispatch to the registered runtime executors.
patch_turbo_entity_methods()

__all__ = ["__version__", "patch_turbo_entity_methods"]
# Keep in sync with pyproject.toml ([project] version = "0.1.0.dev1");
# the previous value "0.1.0" disagreed with the published release metadata.
__version__ = "0.1.0.dev1"
@@ -0,0 +1,253 @@
from __future__ import annotations

import uuid
from typing import AsyncIterator, List, Optional

from loguru import logger

from turbo_agent_core.schema.agents import AgentTool, APITool, LLMTool, Tool
# TurboEntity was previously imported twice; merged into one line with LLMModel.
from turbo_agent_core.schema.entity import LLMModel, TurboEntity
from turbo_agent_core.schema.enums import JSON, RunType
from turbo_agent_core.schema.events import (
    BaseEvent,
    ContentTextDeltaEvent,
    ContentTextDeltaPayload,
    ExecutorMetadata,
    RunLifecycleCompletedEvent,
    RunLifecycleCompletedPayload,
    RunLifecycleCreatedEvent,
    RunLifecycleCreatedPayload,
    RunLifecycleFailedEvent,
    RunLifecycleFailedPayload,
    UserInfo,
)

from .base import BaseExecutor
from .utils.executor_identity import build_executor_id
# from .context import ExecutionContext
28
class APIRunExecutor(BaseExecutor):
    """Executor for ``Tool`` entities (``run_type == RunType.Tool``).

    Dispatches to the tool's selected version: ``APITool`` (HTTP call,
    currently stubbed), ``LLMTool`` (invocation via langchain-openai), or
    ``AgentTool`` (delegation to its backend agent). A singleton instance is
    registered at import time (see module bottom).
    """

    run_type = RunType.Tool

    def _resolve_user_metadata(self, **kwargs) -> UserInfo:
        """Build a ``UserInfo`` from loosely-typed kwargs.

        Accepts ``user_metadata``/``user_info`` as a ``UserInfo`` instance or
        a dict; otherwise falls back to the scalar ``user_id``/``username``
        kwargs, defaulting both to ``"local"``.
        """
        raw = kwargs.get("user_metadata") or kwargs.get("user_info")
        if isinstance(raw, UserInfo):
            return raw
        if isinstance(raw, dict):
            try:
                return UserInfo.model_validate(raw)
            except Exception:
                # Malformed dict: fall through to the scalar fallback below.
                pass
        user_id = kwargs.get("user_id")
        username = kwargs.get("username")
        return UserInfo(id=str(user_id or "local"), username=str(username or "local"))

    async def run(self, entity: TurboEntity, input: JSON) -> JSON:
        """Execute the tool once (non-streaming) and return a JSON result.

        Returns an ``{"error": ...}`` payload instead of raising when the
        entity is not a ``Tool``; unknown/missing versions produce a stub
        result so callers keep working.
        """
        if not isinstance(entity, Tool):
            return {"error": "Entity is not a Tool", "entity_id": entity.id}
        version = self._choose_version(entity)
        if isinstance(version, APITool):
            result = await self._run_api_tool(entity, version, input)
            return {"tool": entity.name, "version": version.id, "result": result}
        if isinstance(version, LLMTool):
            content = await self._run_llm_tool(version, input)
            return {"tool": entity.name, "version": version.id, "llm_output": content}
        if isinstance(version, AgentTool) and version.backendAgent:
            # Delegate to backend agent single-shot (not streaming).
            delegated = await version.backendAgent.run(input)  # patched separately
            return {"tool": entity.name, "version": version.id, "delegated": delegated}
        return {"tool": entity.name, "version": getattr(version, 'id', None), "result": f"stub result for {entity.name}"}

    async def stream(self, entity: TurboEntity, input: JSON, trace_id: Optional[str] = None) -> AsyncIterator[BaseEvent]:
        """Stream execution as lifecycle + content-delta events.

        Emits Created first, then version-specific content events, then
        Completed; any exception is converted to a Failed event rather than
        propagated.
        """
        trace_id = trace_id or getattr(entity, "trace_id", None) or str(uuid.uuid4())
        # NOTE(review): run_id is derived from the entity id only, so it is not
        # unique across repeated runs of the same entity — confirm intended.
        run_id = f"run_{entity.id}"
        executor_type = self.run_type
        executor_id = build_executor_id(entity, executor_type)
        executor_path = [executor_id] if executor_id else None
        # Routing fields shared by every event of this run (previously
        # copy-pasted into each constructor call).
        ctx = dict(
            trace_id=trace_id,
            trace_path=[],
            run_id=run_id,
            run_path=[run_id],
            executor_id=executor_id,
            executor_type=executor_type,
            executor_path=executor_path,
        )

        yield RunLifecycleCreatedEvent(
            **ctx,
            executor_metadata=ExecutorMetadata(
                id=getattr(entity, "id", ""),
                name=getattr(entity, "name", None),
                run_type=getattr(entity, "run_type", None),
                version_id=getattr(entity, "version_id", None),
                version=getattr(entity, "version_tag", None),
            ),
            # Called without kwargs here, so this always yields the
            # "local" fallback user — presumably real user info arrives via a
            # different call path; TODO confirm.
            user_metadata=self._resolve_user_metadata(),
            payload=RunLifecycleCreatedPayload(input_data=input),
        )
        try:
            if not isinstance(entity, Tool):
                yield RunLifecycleFailedEvent(
                    **ctx,
                    payload=RunLifecycleFailedPayload(
                        error={"code": "TypeError", "message": "Entity is not Tool"}
                    ),
                )
                return
            version = self._choose_version(entity)
            if isinstance(version, APITool):
                result = await self._run_api_tool(entity, version, input)
                yield ContentTextDeltaEvent(
                    **ctx,
                    payload=ContentTextDeltaPayload(delta=str(result)),
                )
            elif isinstance(version, LLMTool):
                # Stream LLM output tokens within this run's event context.
                async for delta in self._stream_llm_tool(
                    version,
                    input,
                    entity,
                    trace_id=trace_id,
                    run_id=run_id,
                    executor_id=executor_id,
                    executor_type=executor_type,
                    executor_path=executor_path,
                ):
                    yield delta
            elif isinstance(version, AgentTool) and version.backendAgent:
                # Forward the backend agent's own event stream unchanged.
                async for evt in version.backendAgent.stream_run(input):
                    yield evt
            else:
                yield ContentTextDeltaEvent(
                    **ctx,
                    payload=ContentTextDeltaPayload(delta=f"stub result for {entity.name}"),
                )
            yield RunLifecycleCompletedEvent(
                **ctx,
                payload=RunLifecycleCompletedPayload(output=None, usage={"total_tokens": 0}),
            )
        except Exception as e:  # pragma: no cover
            logger.exception(e)
            yield RunLifecycleFailedEvent(
                **ctx,
                payload=RunLifecycleFailedPayload(error={"code": "RuntimeError", "message": str(e)}),
            )

    # ---------------- internal helpers -----------------

    def _choose_version(self, tool: Tool):
        """Return the tool's default version if resolvable, else the first.

        Returns ``None`` when the tool has no versions at all.
        """
        if not tool.versions:
            return None
        if tool.defaultVersionId:
            for v in tool.versions:
                if getattr(v, "id", None) == tool.defaultVersionId:
                    return v
        return tool.versions[0]

    @staticmethod
    def _extract_text(input: JSON):
        """Pull the prompt text from a dict input (``input["text"]``, which may
        be ``None`` when the key is absent) or stringify non-dict input."""
        return input.get("text") if isinstance(input, dict) else str(input)

    @staticmethod
    def _build_chat(model_obj: LLMModel):
        """Construct a ChatOpenAI client from the model's first instance.

        Shared by the invoke and stream paths (previously duplicated in
        both). Raises if langchain-openai is unavailable or the
        instance/endpoint data is incomplete; callers catch and fall back
        to a stub response.
        """
        from langchain_openai import ChatOpenAI

        instance = model_obj.instances[0]
        endpoint = instance.endpoint
        return ChatOpenAI(
            model=instance.request_model_id or model_obj.id,
            base_url=endpoint.url,
            api_key=endpoint.accessKeyOrToken,
            temperature=0.2,
        )

    async def _run_api_tool(self, tool: Tool, version: APITool, input: JSON):
        # Placeholder: integrate real HTTP call based on version fields.
        logger.debug(f"_run_api_tool {tool.name} path={version.url_path_template} input={input}")
        return {"echo": input}

    async def _run_llm_tool(self, version: LLMTool, input: JSON):
        """Invoke the LLM tool once; fall back to a stub string on any failure."""
        model_obj: Optional[LLMModel] = getattr(version, "model", None)
        text = self._extract_text(input)
        if model_obj and model_obj.instances:
            try:
                chat = self._build_chat(model_obj)
                resp = await chat.ainvoke([f"你是工具 {version.name}", text])  # type: ignore
                return getattr(resp, "content", None) or str(resp)
            except Exception as e:  # pragma: no cover
                logger.warning(f"LLMTool invocation failed: {e}")
        return f"(stub llm-tool response for {version.name}) 输入: {text}"

    async def _stream_llm_tool(
        self,
        version: LLMTool,
        input: JSON,
        parent: Tool,
        *,
        trace_id: str,
        run_id: str,
        executor_id: str,
        executor_type: RunType,
        executor_path: Optional[List[str]],
    ) -> AsyncIterator[BaseEvent]:
        """Stream the LLM tool's output as ``ContentTextDeltaEvent``s.

        NOTE: these events belong to the *parent* executor's event stream,
        so the caller's trace_id/run_id and executor context are reused
        verbatim. Falls back to a single stub delta on any failure.
        """
        model_obj: Optional[LLMModel] = getattr(version, "model", None)
        text = self._extract_text(input)
        ctx = dict(
            trace_id=trace_id,
            trace_path=[],
            run_id=run_id,
            run_path=[run_id],
            executor_id=executor_id,
            executor_type=executor_type,
            executor_path=executor_path,
        )
        if model_obj and model_obj.instances:
            try:
                chat = self._build_chat(model_obj)
                accumulated = ""
                async for chunk in chat.astream([f"你是工具 {version.name}", text]):  # type: ignore
                    part = getattr(chunk, "content", None) or ""
                    if not part:
                        continue
                    accumulated += part
                    yield ContentTextDeltaEvent(
                        **ctx,
                        payload=ContentTextDeltaPayload(delta=part),
                    )
                return
            except Exception as e:  # pragma: no cover
                logger.warning(f"LLMTool stream failed: {e}")
        yield ContentTextDeltaEvent(
            **ctx,
            payload=ContentTextDeltaPayload(delta=f"(stub llm-tool stream for {version.name}) 输入: {text}"),
        )


# Register the executor singleton at import time.
APIRunExecutor()
@@ -0,0 +1,9 @@
1
from __future__ import annotations

# Deprecated module kept only for backward compatibility; it will be removed
# in a future release. The EntityRuntime registry pattern has been replaced
# by direct inheritance plus method patching — see turbo_agent_runtime.utils.patch.
#
# NOTE(review): api.py still does `from .base import BaseExecutor`, but this
# module exports nothing — confirm where BaseExecutor is actually defined.

# from .context import ExecutionContext

__all__ = []
@@ -0,0 +1,3 @@
1
+ # ExecutionContext removed
2
+
3
+