xgae 0.2.2__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of xgae might be problematic. Click here for more details.

@@ -59,6 +59,8 @@ class XGATaskEngine:
59
59
 
60
60
  self.task_response_msgs: List[XGAResponseMessage] = []
61
61
 
62
+ self.terminate_task = False
63
+
62
64
  async def run_task_with_final_answer(self,
63
65
  task_input: Dict[str, Any],
64
66
  trace_id: Optional[str] = None) -> XGATaskResult:
@@ -148,6 +150,7 @@ class XGATaskEngine:
148
150
  auto_continue = True
149
151
  while auto_continue and auto_continue_count < self.max_auto_run:
150
152
  auto_continue = False
153
+ iterations = auto_continue_count
151
154
 
152
155
  try:
153
156
  async for chunk in self._run_task_once(continuous_state):
@@ -193,11 +196,12 @@ class XGATaskEngine:
193
196
  self.task_langfuse.root_span.event(name="engine_task_run_once_error", level="ERROR",
194
197
  status_message=f"Call task_run_once error: {run_error}",
195
198
  metadata={"trace": trace})
196
-
197
199
  status_content = {'status_type': "error", 'role': "system", 'message': "Call run_task_once error"}
198
200
  error_msg = self.add_response_message(type="status", content=status_content, is_llm_message=False)
199
201
  yield error_msg
200
-
202
+ finally:
203
+ if not self.running_task_checkpoint("termination_check", iterations):
204
+ break
201
205
 
202
206
  async def _run_task_once(self, continuous_state: TaskRunContinuousState) -> AsyncGenerator[Dict[str, Any], None]:
203
207
  llm_messages = [{"role": "system", "content": self.task_prompt}]
@@ -213,6 +217,9 @@ class XGATaskEngine:
213
217
  llm_messages.append(temp_assistant_message)
214
218
 
215
219
  auto_count = continuous_state.get("auto_continue_count")
220
+
221
+ self.running_task_checkpoint("before_completion", auto_count, llm_messages)
222
+
216
223
  langfuse_metadata = self.task_langfuse.create_llm_langfuse_meta(auto_count)
217
224
 
218
225
  llm_response = await self.llm_client.acompletion(llm_messages, langfuse_metadata)
@@ -220,12 +227,21 @@ class XGATaskEngine:
220
227
 
221
228
  async for chunk in response_processor.process_response(llm_response, llm_messages, continuous_state):
222
229
  self._logging_reponse_chunk(chunk, auto_count)
230
+
231
+ if chunk['type'] == "assistant":
232
+ assis_content = chunk['content']
233
+ self.running_task_checkpoint("after_completion", auto_count, llm_messages, assis_content)
234
+
223
235
  yield chunk
224
236
 
225
237
  def parse_final_result(self, chunks: List[Dict[str, Any]]) -> XGATaskResult:
226
238
  final_result: XGATaskResult = None
227
239
  reverse_chunks = reversed(chunks)
228
240
  chunk = None
241
+
242
+ if self.terminate_task:
243
+ return XGATaskResult(type="error", content="LLM Task is terminated !")
244
+
229
245
  try:
230
246
  finish_reason = ''
231
247
  for chunk in reverse_chunks:
@@ -284,6 +300,25 @@ class XGATaskEngine:
284
300
 
285
301
  return final_result
286
302
 
303
+
304
+ def stop_task(self):
305
+ logging.warning(f"⚠️ Begin Terminate Task: {self.task_id}")
306
+ self.task_langfuse.root_span.event(name="stop_task", level="DEFAULT",
307
+ status_message="Begin Terminate Task")
308
+ self.terminate_task = True
309
+
310
+
311
+ def running_task_checkpoint(self,
312
+ task_state: Literal["before_completion", "after_completion", "termination_check"],
313
+ iterations: int,
314
+ llm_messages: List[Dict[str, Any]] = None,
315
+ llm_response: Dict[str, Any] = None
316
+ )-> bool:
317
+ if self.terminate_task and task_state == "termination_check":
318
+ logging.warning(f"⚠️ TASK: {self.task_id} STOP RUNNING for STOP Command !")
319
+ return not self.terminate_task
320
+
321
+
287
322
  def create_response_message(self, type: XGAResponseMsgType,
288
323
  content: Union[Dict[str, Any], List[Any], str],
289
324
  is_llm_message: bool,
@@ -0,0 +1,122 @@
1
+ import logging
2
+ from typing import Any, Optional, override, Callable, Literal, Dict, List
3
+
4
+ from xgae.engine.engine_base import XGAToolBox
5
+ from xgae.engine.task_engine import XGATaskEngine
6
+ from xgae.utils.llm_client import LLMConfig
7
+
8
+
9
class ARETaskEngine(XGATaskEngine):
    """Task engine variant that exposes per-checkpoint callbacks to an owning agent.

    Wraps XGATaskEngine and forwards the three run-loop checkpoints to
    caller-supplied hooks:
      - pre_run_task_fn(agent, iterations, llm_messages)  at "before_completion"
      - post_run_task_fn(agent, iterations, llm_response) at "after_completion"
      - terminate_task_fn(agent, iterations) -> bool      at "termination_check"
    """

    def __init__(self,
                 agent: Any,
                 agent_id: str,
                 system_prompt: str,
                 max_auto_run: int,
                 llm_config: Optional[LLMConfig] = None,
                 tool_box: Optional[XGAToolBox] = None,
                 pre_run_task_fn: Callable[[Any, int, List[Dict[str, Any]]], Any] = None,
                 post_run_task_fn: Callable[[Any, int, Dict[str, Any]], Any] = None,
                 terminate_task_fn: Callable[[Any, int], bool] = None,
                 ):
        """Create the engine; `agent` is an opaque handle passed back to every hook.

        All three hook parameters are optional; a missing hook is simply skipped
        at its checkpoint.
        """
        super().__init__(agent_id=agent_id,
                         general_tools=[],
                         custom_tools=["*"],
                         system_prompt=system_prompt,
                         max_auto_run=max_auto_run,
                         llm_config=llm_config,
                         tool_box=tool_box,
                         )
        self.agent = agent
        self.pre_run_task_fn = pre_run_task_fn
        self.post_run_task_fn = post_run_task_fn
        self.terminate_task_fn = terminate_task_fn

    @override
    def running_task_checkpoint(self,
                                task_state: Literal["before_completion", "after_completion", "termination_check"],
                                iterations: int,
                                llm_messages: List[Dict[str, Any]] = None,
                                llm_response: Dict[str, Any] = None
                                ) -> bool:
        """Dispatch the checkpoint to the matching agent hook.

        Returns True to continue the task; False once termination is pending.
        At "termination_check" the hook may request a stop, which is applied
        via stop_task() so the base-class flag and Langfuse event stay in sync.
        """
        is_continue_task = True

        if task_state == "before_completion" and self.pre_run_task_fn:
            self.pre_run_task_fn(self.agent, iterations, llm_messages)
        elif task_state == "after_completion" and self.post_run_task_fn:
            self.post_run_task_fn(self.agent, iterations, llm_response)
        elif task_state == "termination_check":
            if self.terminate_task:
                logging.warning(f"running_task_checkpoint: ⚠️ TASK: {self.task_id} STOP RUNNING for STOP Command !")

            if self.terminate_task_fn:
                # BUGFIX: the original re-tested `self.terminate_task_fn` in a
                # conditional expression (`... if self.terminate_task_fn else False`)
                # inside this already-guarded branch; the `else False` arm was dead.
                is_terminate = self.terminate_task_fn(self.agent, iterations)
                if is_terminate:
                    logging.warning(f"running_task_checkpoint: ⚠️ TASK: {self.task_id} STOP RUNNING for Termination Function !")
                    self.stop_task()

            is_continue_task = not self.terminate_task

        return is_continue_task
61
+
62
+
63
+
64
if __name__ == "__main__":
    # Demo driver: runs ARETaskEngine against a fault-location prompt with all
    # three checkpoint hooks wired to logging callbacks.
    import asyncio
    import os
    from xgae.utils.misc import read_file
    from xgae.utils.setup_env import setup_logging
    from xgae.engine.mcp_tool_box import XGAMcpToolBox

    setup_logging()

    def pre_run_task(agent, iterations: int, llm_messages: List[Dict[str, Any]]):
        # Flatten every message dict into "key: value" lines for inspection.
        prompt = "\n\n".join([f"{key}: {value}" for d in llm_messages for key, value in d.items()]) if llm_messages else ""
        logging.info(f"pre_run_task: iterations={iterations}, prompt: \n{prompt}\n")

    def post_run_task(agent, iterations: int, llm_response: Dict[str, Any]):
        logging.info(f"post_run_task: iterations={iterations}, prompt: \n{llm_response}\n")

    def terminate_task(agent, iterations: int) -> bool:
        logging.info(f"terminate_task: iterations={iterations}")
        return iterations > 6  # can test terminate by > 3

    async def main():
        # Before Run Exec: uv run example-fault-tools
        # LLAMA_API_KEY ,
        tool_box = XGAMcpToolBox(custom_mcp_server_file="mcpservers/custom_servers.json")
        system_prompt = read_file("templates/example/fault_user_prompt.txt")
        llm_config = LLMConfig(
            model="openai/qwen-plus",
            api_key=os.getenv('LLAMA_API_KEY'),
            api_base=os.getenv('LLAMA_API_BASE'),
            stream=True,
            enable_thinking=False,
        )

        engine = ARETaskEngine(
            agent="AREAgent",  # Just for test,ARE use real Agent Object
            agent_id="agent_1",
            system_prompt=system_prompt,
            max_auto_run=15,
            llm_config=llm_config,
            tool_box=tool_box,
            pre_run_task_fn=pre_run_task,
            post_run_task_fn=post_run_task,
            terminate_task_fn=terminate_task,
        )

        user_input = "locate 10.0.0.1 fault and solution"
        chunks = []
        async for chunk in engine.run_task(task_input={"role": "user", "content": user_input}):
            chunks.append(chunk)
            print(chunk)

        final_result = engine.parse_final_result(chunks)
        print(f"\n\nFINAL_RESULT: {final_result}")

    asyncio.run(main())
@@ -1,11 +1,10 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: xgae
3
- Version: 0.2.2
3
+ Version: 0.3.0
4
4
  Summary: Extreme General Agent Engine
5
- Requires-Python: >=3.13
5
+ Requires-Python: >=3.11
6
6
  Requires-Dist: colorlog==6.9.0
7
7
  Requires-Dist: langchain-mcp-adapters==0.1.9
8
- Requires-Dist: langchain==0.3.27
9
8
  Requires-Dist: langfuse==2.60.9
10
9
  Requires-Dist: litellm==1.74.15
11
10
  Requires-Dist: mcp==1.13.0
@@ -3,11 +3,12 @@ xgae/engine_cli_app.py,sha256=FdmIpq8KDsgyZNfwCDgNX7FEZFeRFyGOt_H1oZF8aKs,2890
3
3
  xgae/engine/engine_base.py,sha256=RR1em2wHiM2jP-peHt77SKdHWjnYOjdIIzN93zT61cA,1715
4
4
  xgae/engine/mcp_tool_box.py,sha256=G4hKIMguwg1cO4Us2NMfdloYim8kuikVyVTIPucJr7o,10903
5
5
  xgae/engine/prompt_builder.py,sha256=6I5rjgvNJ27QJ8DDuBTplutoPZdGs9LYFv3TSgT7zmc,5045
6
- xgae/engine/task_engine.py,sha256=bz3s2w6-QHYRJjwO2KLtCv46gFb1p5JZeEW6cX2A7MI,21720
6
+ xgae/engine/task_engine.py,sha256=1YgkcsX8LAAIIp1DsVbphwOVO9xA0FioABxzaRvers8,23223
7
7
  xgae/engine/task_langfuse.py,sha256=ifkGrPBv2daLTKE-fCfEtOoI0n4Pd-lCwhyRRL0h308,2850
8
8
  xgae/engine/responser/non_stream_responser.py,sha256=zEJjqCgZVe2B8gkHYRFU7tmBV834f7w2a4Ws25P1N-c,5289
9
9
  xgae/engine/responser/responser_base.py,sha256=jhl1Bdz1Fs3KofGEymThNXlQuCORFTTkTAR_U47krds,24403
10
10
  xgae/engine/responser/stream_responser.py,sha256=cv4UGcxj8OksEogW7DUGTCvSJabu-DF6GceFyUwaXI4,7627
11
+ xgae/gaia2/are_engine.py,sha256=GPs6c94CN8jJjmNNsE7XHxdskPJ31p4Ds9QCST7BiI4,5050
11
12
  xgae/tools/without_general_tools_app.py,sha256=KqsdhxD3hvTpiygaGUVHysRFjvv_1A8zOwMKN1J0J0U,3821
12
13
  xgae/utils/__init__.py,sha256=ElaGS-zdeZeu6is41u3Ny7lkvhg7BDSK-jMNg9j6K5A,499
13
14
  xgae/utils/json_helpers.py,sha256=WD4G5U9Dh8N6J9O0L5wGyqj-NHi09kcXHGdLD_26nlc,3607
@@ -15,7 +16,7 @@ xgae/utils/llm_client.py,sha256=rqnu_NYXBC0hl4aozP5UOSyf0q-ONB5ywtnrXzA88OE,1505
15
16
  xgae/utils/misc.py,sha256=aMWOvJ9VW52q-L9Lkjl1hvXqLwpJAmyxA-Z8jzqFG0U,907
16
17
  xgae/utils/setup_env.py,sha256=MqNG0c2QQBDFU1kI8frxr9kB5d08Mmi3QZ1OoorgIa0,2662
17
18
  xgae/utils/xml_tool_parser.py,sha256=Mb0d8kBrfyAEvUwW1Nqir-3BgxZRr0ZX3WymQouuFSo,4859
18
- xgae-0.2.2.dist-info/METADATA,sha256=9VaGgRhzSWNmTuYbKAB9knyXKCJdbNP-40vRenAKi-0,504
19
- xgae-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
20
- xgae-0.2.2.dist-info/entry_points.txt,sha256=wmvgtMQbtzTbDPETS-tbQJD7jVlcs4hp0w6wOB0ooCc,229
21
- xgae-0.2.2.dist-info/RECORD,,
19
+ xgae-0.3.0.dist-info/METADATA,sha256=OAdIkEtp1xWqtU2MrOBRJfPyu7TLJk6m5EPYh9WYNdQ,471
20
+ xgae-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
21
+ xgae-0.3.0.dist-info/entry_points.txt,sha256=wmvgtMQbtzTbDPETS-tbQJD7jVlcs4hp0w6wOB0ooCc,229
22
+ xgae-0.3.0.dist-info/RECORD,,
File without changes