xgae-0.1.8-py3-none-any.whl → xgae-0.1.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of xgae might be problematic.

xgae/__init__.py CHANGED
@@ -0,0 +1,4 @@
+ if __name__ == "__main__":
+     from xgae.cli_app import main
+
+     main()
xgae/cli_app.py ADDED
@@ -0,0 +1,87 @@
+ import asyncio
+ import sys
+
+ from xgae.engine.mcp_tool_box import XGAMcpToolBox
+ from xgae.engine.task_engine import XGATaskEngine
+ from xgae.utils.llm_client import LLMConfig
+ from xgae.utils.misc import read_file
+
+ from xgae.utils.setup_env import setup_langfuse, setup_env_logging
+
+ setup_env_logging()
+ langfuse = setup_langfuse()
+
+ def get_user_message(question)-> str:
+     while True:
+         user_message = input(f"\n💬 {question}: ")
+         if user_message.lower() == 'exit':
+             print("\n====== Extreme General Agent Engine CLI EXIT ======")
+             sys.exit()
+
+         if not user_message.strip():
+             print("\nuser message is empty, input agin !!!\n")
+             continue
+
+         return user_message
+
+ async def cli() -> None:
+     await asyncio.sleep(1)
+     print("\n====== Extreme General Agent Engine CLI START ======")
+     user_message = input("\n💬 Start Custom MCP Server and Load User Prompt (Yes/No): ")
+     tool_box = None
+     system_prompt = None
+     general_tools = []
+     custom_tools = []
+     if user_message.lower() == 'yes':
+         print(f"--- Start Custom MCP Server in custom_servers.json")
+         print(f"--- Load User Prompt in example/fault_user_prompt.txt")
+         tool_box = XGAMcpToolBox(custom_mcp_server_file="mcpservers/custom_servers.json")
+         system_prompt = read_file("templates/example/fault_user_prompt.txt")
+         custom_tools = ["*"]
+     else:
+         print(f"--- Start General Agent Server")
+         print(f"--- Load System Prompt")
+         general_tools = ["*"]
+
+     while True:
+         user_message = get_user_message("Enter your message (or 'exit' to quit)")
+
+         print("\n🔄 Running XGA Engine ...\n")
+         engine = XGATaskEngine(tool_box=tool_box,
+                                general_tools=general_tools,
+                                custom_tools=custom_tools,
+                                llm_config=LLMConfig(stream=False),
+                                system_prompt=system_prompt,
+                                max_auto_run=8)
+
+         # Two task run in same langfuse trace
+         trace_id = langfuse.trace(name="xgae_cli").trace_id
+
+         final_result = await engine.run_task_with_final_answer(
+             task_message={"role": "user", "content": user_message},
+             trace_id=trace_id
+         )
+
+         if final_result["type"] == "ask":
+             await asyncio.sleep(1)
+             print(f"\n📌 ASK INFO: {final_result['content']}")
+             user_message = get_user_message("Enter ASK information (or 'exit' to quit)")
+             final_result = await engine.run_task_with_final_answer(
+                 task_message={"role": "user", "content": user_message},
+                 trace_id=trace_id
+             )
+
+         await asyncio.sleep(1)
+         result_prefix = "✅" if final_result["type"] == "answer" else "❌"
+         if final_result["type"] == "ask":
+             print("\n *** IMPORTANT: XGA CLI only support showing ONE TURN ASK !")
+             result_prefix = "⚠️"
+         print(f"\n {result_prefix} FINAL RESULT: {final_result['content']}")
+
+
+ def main():
+     asyncio.run(cli())
+
+
+ if __name__ == "__main__":
+     main()
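
For reference, a minimal sketch of driving the new CLI module programmatically; this assumes the package is installed with its usual .env configuration for the LLM and Langfuse, and mirrors what the `xgae` console script added in entry_points.txt does:

from xgae.cli_app import main

# Starts the interactive loop defined in cli(); type 'exit' at a prompt to quit.
main()
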
@@ -7,7 +7,7 @@ class XGAError(Exception):
      pass

  XGAMsgStatusType = Literal["error", "finish", "tool_error", "tool_started", "tool_completed", "tool_failed", "thread_run_start", "thread_run_end", "assistant_response_start", "assistant_response_end"]
- XGAResponseMsgType = Literal["user", "status", "tool", "assistant"]
+ XGAResponseMsgType = Literal["user", "status", "tool", "assistant", "assistant_complete"]

  class XGAResponseMessage(TypedDict, total=False):
      message_id: str
@@ -190,11 +190,16 @@ class XGAMcpToolBox(XGAToolBox):
  if __name__ == "__main__":
      import asyncio
      from dataclasses import asdict
+     from xgae.utils.setup_env import setup_logging
+
+     setup_logging()

      async def main():
-         task_id = "task1"
+         ## Before Run Exec: uv run custom_fault_tools
          mcp_tool_box = XGAMcpToolBox(custom_mcp_server_file="mcpservers/custom_servers.json")
          #mcp_tool_box = XGAMcpToolBox()
+
+         task_id = "task1"
          await mcp_tool_box.load_mcp_tools_schema()
          await mcp_tool_box.creat_task_tool_box(task_id=task_id, general_tools=["*"], custom_tools=["bomc_fault.*"])
          tool_schemas = mcp_tool_box.get_task_tool_schemas(task_id, "general_tool")
@@ -3,7 +3,7 @@ import datetime

  from typing import Optional, List

- from xgae.engine.engine_base import XGAToolSchema, XGAError
+ from xgae.engine.engine_base import XGAToolSchema
  from xgae.utils.misc import read_file, format_file_with_args


@@ -1,9 +1,10 @@
  import logging

  from typing import List, Dict, Any, AsyncGenerator, override,Optional
+ from xgae.utils.json_helpers import format_for_yield

  from xgae.engine.responser.responser_base import TaskResponseProcessor, TaskResponserContext, TaskRunContinuousState
- from xgae.utils.json_helpers import format_for_yield
+

  class NonStreamTaskResponser(TaskResponseProcessor):
      def __init__(self, response_context: TaskResponserContext):
@@ -47,8 +48,8 @@ class NonStreamTaskResponser(TaskResponseProcessor):
          else:
              logging.warning(f"NonStreamTask:LLM response_message is empty")

-         message_data = {"role": "assistant", "content": llm_content, "index": -1} # index=-1, full llm_content
-         assistant_msg = self.add_response_message(type="assistant", content=message_data, is_llm_message=True)
+         message_data = {"role": "assistant", "content": llm_content} # index=-1, full llm_content
+         assistant_msg = self.add_response_message(type="assistant_complete", content=message_data, is_llm_message=True)
          yield assistant_msg

          tool_calls_to_execute = [item['tool_call'] for item in parsed_xml_data]
@@ -81,6 +82,9 @@ class NonStreamTaskResponser(TaskResponseProcessor):
                  finish_reason = "completed"
                  break
              tool_index += 1
+         else:
+             finish_reason = "non_tool_call"
+             logging.warning(f"NonStreamTask: tool_calls is empty, No Tool need to call !")

          if finish_reason:
              finish_content = {"status_type": "finish", "finish_reason": finish_reason}
@@ -6,11 +6,12 @@ from abc import ABC, abstractmethod
  from dataclasses import dataclass
  from typing import List, Dict, Any, Optional, Tuple, Union, Literal, Callable, TypedDict, AsyncGenerator

+ from xgae.utils.json_helpers import safe_json_parse
+ from xgae.utils.xml_tool_parser import XMLToolParser
+
  from xgae.engine.engine_base import XGAToolResult, XGAToolBox
- from xgae.utils.setup_env import langfuse
+ from xgae.engine.task_langfuse import XGATaskLangFuse

- from xgae.utils.json_helpers import safe_json_parse, format_for_yield
- from xgae.utils.xml_tool_parser import XMLToolParser

  # Type alias for XML result adding strategy
  XmlAddingStrategy = Literal["user_message", "assistant_message", "inline_edit"]
@@ -23,14 +24,13 @@ class TaskResponserContext(TypedDict, total=False):
      task_id: str
      task_run_id: str
      task_no: int
-     trace_id: str
-     root_span_id: str
      model_name: str
      max_xml_tool_calls: int # LLM generate max_xml_tool limit, 0 is no limit
-     add_response_msg_func: Callable
-     tool_box: XGAToolBox
      tool_execution_strategy: ToolExecutionStrategy
      xml_adding_strategy: XmlAddingStrategy
+     add_response_msg_func: Callable
+     tool_box: XGAToolBox
+     task_langfuse: XGATaskLangFuse


  class TaskRunContinuousState(TypedDict, total=False):
@@ -63,14 +63,14 @@ class TaskResponseProcessor(ABC):
          self.xml_adding_strategy = self.response_context.get("xml_adding_strategy", "user_message")
          self.max_xml_tool_calls = self.response_context.get("max_xml_tool_calls", 0)

-         self.root_span = langfuse.span(trace_id=response_context.get("trace_id"), id=response_context.get("root_span_id"))
+         task_langfuse = response_context.get("task_langfuse")
+         self.root_span = task_langfuse.root_span
          self.add_response_message = response_context.get("add_response_msg_func")

          self.tool_box = response_context.get("tool_box")
          self.xml_parser = XMLToolParser()


-
      @abstractmethod
      async def process_response(self,
                                 llm_response: AsyncGenerator,
@@ -1,4 +1,3 @@
-
  import logging
  import json
  import os
@@ -6,18 +5,15 @@ import os
  from typing import List, Any, Dict, Optional, AsyncGenerator, Union, Literal
  from uuid import uuid4

- from xgae.engine.responser.responser_base import TaskResponserContext, TaskResponseProcessor, TaskRunContinuousState
- from xgae.engine.engine_base import XGAResponseMsgType, XGAResponseMessage, XGAToolBox, XGATaskResult
-
  from xgae.utils import handle_error
- from xgae.utils.setup_env import langfuse
-
- from xgae.utils.llm_client import LLMClient, LLMConfig, LangfuseMetadata
-
+ from xgae.utils.llm_client import LLMClient, LLMConfig
  from xgae.utils.json_helpers import format_for_yield
+
+ from xgae.engine.engine_base import XGAResponseMsgType, XGAResponseMessage, XGAToolBox, XGATaskResult
+ from xgae.engine.task_langfuse import XGATaskLangFuse
  from xgae.engine.prompt_builder import XGAPromptBuilder
  from xgae.engine.mcp_tool_box import XGAMcpToolBox
-
+ from xgae.engine.responser.responser_base import TaskResponserContext, TaskResponseProcessor, TaskRunContinuousState

  class XGATaskEngine:
      def __init__(self,
@@ -42,6 +38,7 @@ class XGATaskEngine:

          self.prompt_builder = prompt_builder or XGAPromptBuilder(system_prompt)
          self.tool_box: XGAToolBox = tool_box or XGAMcpToolBox()
+         self.task_langfuse: XGATaskLangFuse = None

          self.general_tools:List[str] = general_tools
          self.custom_tools:List[str] = custom_tools
@@ -53,18 +50,18 @@ class XGATaskEngine:

          self.task_no = -1
          self.task_run_id :str = None
-
          self.task_prompt :str = None
-         self.trace_id :str = None
-         self.root_span_id :str = None
-         self.root_span_name :str = None
+

      async def run_task_with_final_answer(self,
                                           task_message: Dict[str, Any],
                                           trace_id: Optional[str] = None) -> XGATaskResult:
          final_result:XGATaskResult = None
          try:
-             self._init_langfuse("run_task_with_final_answer", task_message, trace_id)
+             await self._init_task()
+
+             self.task_langfuse.start_root_span("run_task_with_final_answer", task_message, trace_id)
+
              chunks = []
              async for chunk in self.run_task(task_message=task_message, trace_id=trace_id):
                  chunks.append(chunk)
@@ -76,7 +73,7 @@ class XGATaskEngine:

              return final_result
          finally:
-             self._end_langfuse("run_task_with_final_answer", final_result)
+             self.task_langfuse.end_root_span("run_task_with_final_answer", final_result)


      async def run_task(self,
@@ -84,7 +81,8 @@ class XGATaskEngine:
                         trace_id: Optional[str] = None) -> AsyncGenerator[Dict[str, Any], None]:
          try:
              await self._init_task()
-             self._init_langfuse("run_task", task_message, trace_id)
+
+             self.task_langfuse.start_root_span("run_task", task_message, trace_id)

              self.add_response_message(type="user", content=task_message, is_llm_message=True)

@@ -92,33 +90,36 @@ class XGATaskEngine:
                  yield chunk
          finally:
              await self.tool_box.destroy_task_tool_box(self.task_id)
-             self._end_langfuse("run_task")
-
+             self.task_langfuse.end_root_span("run_task")
+             self.task_run_id = None

      async def _init_task(self) -> None:
-         self.task_no = self.task_no + 1
-         self.task_run_id = f"{self.task_id}[{self.task_no}]"
+         if self.task_run_id is None:
+             self.task_no = self.task_no + 1
+             self.task_run_id = f"{self.task_id}[{self.task_no}]"

-         general_tools = self.general_tools or ["complete", "ask"]
-         if "*" not in general_tools:
-             if "complete" not in general_tools:
-                 general_tools.append("complete")
-             elif "ask" not in general_tools:
-                 general_tools.append("ask")
+             self.task_langfuse =self._create_task_langfuse()

-         custom_tools = self.custom_tools or []
-         if isinstance(self.tool_box, XGAMcpToolBox):
-             await self.tool_box.load_mcp_tools_schema()
+             general_tools = self.general_tools or ["complete", "ask"]
+             if "*" not in general_tools:
+                 if "complete" not in general_tools:
+                     general_tools.append("complete")
+                 elif "ask" not in general_tools:
+                     general_tools.append("ask")

-         await self.tool_box.creat_task_tool_box(self.task_id, general_tools, custom_tools)
-         general_tool_schemas = self.tool_box.get_task_tool_schemas(self.task_id, "general_tool")
-         custom_tool_schemas = self.tool_box.get_task_tool_schemas(self.task_id, "custom_tool")
+             custom_tools = self.custom_tools or []
+             if isinstance(self.tool_box, XGAMcpToolBox):
+                 await self.tool_box.load_mcp_tools_schema()

-         self.task_prompt = self.prompt_builder.build_task_prompt(self.model_name, general_tool_schemas, custom_tool_schemas)
+             await self.tool_box.creat_task_tool_box(self.task_id, general_tools, custom_tools)
+             general_tool_schemas = self.tool_box.get_task_tool_schemas(self.task_id, "general_tool")
+             custom_tool_schemas = self.tool_box.get_task_tool_schemas(self.task_id, "custom_tool")

-         logging.info("*" * 30 + f" XGATaskEngine Task'{self.task_id}' Initialized " + "*" * 30)
-         logging.info(f"model_name={self.model_name}, is_stream={self.is_stream}, trace_id={self.trace_id}")
-         logging.info(f"general_tools={general_tools}, custom_tools={custom_tools}")
+             self.task_prompt = self.prompt_builder.build_task_prompt(self.model_name, general_tool_schemas, custom_tool_schemas)
+
+             logging.info("*" * 10 + f" XGATaskEngine Task'{self.task_id}' Initialized " + "*" * 10)
+             logging.info(f"model_name={self.model_name}, is_stream={self.is_stream}")
+             logging.info(f"general_tools={general_tools}, custom_tools={custom_tools}")


      async def _run_task_auto(self) -> AsyncGenerator[Dict[str, Any], None]:
@@ -158,6 +159,10 @@ class XGATaskEngine:
                  logging.warning(f"run_task_auto: Detected finish_reason='xml_tool_limit_reached', stop auto-continue")
                  auto_continue = False
                  break
+             elif finish_reason == 'non_tool_call':
+                 logging.warning(f"run_task_auto: Detected finish_reason='non_tool_call', stop auto-continue")
+                 auto_continue = False
+                 break
              elif finish_reason == 'stop' or finish_reason == 'length': # 'length' never occur
                  auto_continue = True
                  auto_continue_count += 1
@@ -190,13 +195,14 @@ class XGATaskEngine:
          }
          llm_messages.append(temp_assistant_message)

-         llm_count = continuous_state.get("auto_continue_count")
-         langfuse_metadata = self._create_llm_langfuse_meta(llm_count)
+         auto_count = continuous_state.get("auto_continue_count")
+         langfuse_metadata = self.task_langfuse.create_llm_langfuse_meta(auto_count)
+
          llm_response = await self.llm_client.create_completion(llm_messages, langfuse_metadata)
          response_processor = self._create_response_processer()

          async for chunk in response_processor.process_response(llm_response, llm_messages, continuous_state):
-             self._logging_reponse_chunk(chunk)
+             self._logging_reponse_chunk(chunk, auto_count)
              yield chunk

      def _parse_final_result(self, chunks: List[Dict[str, Any]]) -> XGATaskResult:
@@ -236,13 +242,21 @@ class XGATaskEngine:
                      result_type = "answer" if success else "error"
                      result_content = f"Task execute '{tool_name}' {result_type}: {output}"
                      final_result = XGATaskResult(type=result_type, content=result_content)
-                 elif chunk_type == "assistant" and finish_reason == 'stop':
+                 elif chunk_type == "assistant_complete" and finish_reason == 'non_tool_call':
                      assis_content = chunk.get('content', {})
                      result_content = assis_content.get("content", "LLM output is empty")
                      final_result = XGATaskResult(type="answer", content=result_content)

                  if final_result is not None:
                      break
+
+             if final_result and finish_reason == "completed":
+                 logging.info(f"✅ FINAL_RESULT: finish_reason={finish_reason}, final_result={final_result}")
+             elif final_result is not None:
+                 logging.warning(f"⚠️ FINAL_RESULT: finish_reason={finish_reason}, final_result={final_result}")
+             else:
+                 logging.warning(f"❌ FINAL_RESULT: LLM Result is EMPTY, finish_reason={finish_reason}")
+                 final_result = XGATaskResult(type="error", content="LLM has no answer")
          except Exception as e:
              logging.error(f"parse_final_result: Final result pass error: {str(e)}")
              final_result = XGATaskResult(type="error", content="Parse final result failed!")
@@ -258,7 +272,7 @@ class XGATaskEngine:
          metadata = metadata or {}
          metadata["task_id"] = self.task_id
          metadata["task_run_id"] = self.task_run_id
-         metadata["trace_id"] = self.trace_id
+         metadata["trace_id"] = self.task_langfuse.trace_id
          metadata["session_id"] = self.session_id
          metadata["agent_id"] = self.agent_id

@@ -295,41 +309,6 @@ class XGATaskEngine:

          return response_llm_contents

-
-     def _create_llm_langfuse_meta(self, llm_count:int)-> LangfuseMetadata:
-         generation_name = f"xga_task_engine_llm_completion[{self.task_no}]({llm_count})"
-         generation_id = f"{self.task_run_id}({llm_count})"
-         return LangfuseMetadata(
-             generation_name=generation_name,
-             generation_id=generation_id,
-             existing_trace_id=self.trace_id,
-             session_id=self.session_id,
-         )
-
-     def _init_langfuse(self,
-                        root_span_name: str,
-                        task_message: Dict[str, Any],
-                        trace_id: Optional[str] = None):
-
-         if self.root_span_id is None:
-             trace = None
-             if trace_id:
-                 self.trace_id = trace_id
-                 trace = langfuse.trace(id=trace_id)
-             else:
-                 trace = langfuse.trace(name="xga_task_engine")
-                 self.trace_id = trace.id
-
-             span = trace.span(name=root_span_name, input=task_message,metadata={"task_id": self.task_id})
-             self.root_span_id = span.id
-             self.root_span_name = root_span_name
-
-     def _end_langfuse(self, root_span_name:str, output: Optional[XGATaskResult]=None):
-         if self.root_span_id and self.root_span_name == root_span_name:
-             langfuse.span(trace_id=self.trace_id, id=self.root_span_id).end(output=output)
-             self.root_span_id = None
-             self.root_span_name = None
-
      def _create_response_processer(self) -> TaskResponseProcessor:
          response_context = self._create_response_context()
          is_stream = response_context.get("is_stream", False)
@@ -346,19 +325,22 @@ class XGATaskEngine:
              "task_id": self.task_id,
              "task_run_id": self.task_run_id,
              "task_no": self.task_no,
-             "trace_id": self.trace_id,
-             "root_span_id": self.root_span_id,
              "model_name": self.model_name,
              "max_xml_tool_calls": 0,
+             "tool_execution_strategy": "parallel" if self.tool_exec_parallel else "sequential", # ,
+             "xml_adding_strategy": "user_message",
              "add_response_msg_func": self.add_response_message,
              "tool_box": self.tool_box,
-             "tool_execution_strategy": "parallel" if self.tool_exec_parallel else "sequential" ,#,
-             "xml_adding_strategy": "user_message",
+             "task_langfuse": self.task_langfuse,
          }
          return response_context


-     def _logging_reponse_chunk(self, chunk):
+     def _create_task_langfuse(self)-> XGATaskLangFuse:
+         return XGATaskLangFuse(self.session_id, self.task_id, self.task_run_id, self.task_no, self.agent_id)
+
+
+     def _logging_reponse_chunk(self, chunk, auto_count: int) -> None:
          chunk_type = chunk.get('type')
          prefix = ""

372
354
  tool_name = tool_execution.get('function_name')
373
355
  prefix = "-" + tool_name
374
356
 
375
- logging.info(f"TASK_RESP_CHUNK[{chunk_type}{prefix}]: {chunk}")
357
+ logging.info(f"TASK_RESP_CHUNK[{auto_count}]<{chunk_type}{prefix}>: {chunk}")
376
358
 
377
359
 
378
360
  if __name__ == "__main__":
379
361
  import asyncio
380
362
  from xgae.utils.misc import read_file
363
+ from xgae.utils.setup_env import setup_logging
364
+
365
+ setup_logging()
381
366
 
382
367
  async def main():
368
+ # Before Run Exec: uv run custom_fault_tools
383
369
  tool_box = XGAMcpToolBox(custom_mcp_server_file="mcpservers/custom_servers.json")
384
- system_prompt = read_file("templates/example_user_prompt.txt")
370
+ system_prompt = read_file("templates/example/fault_user_prompt.txt")
385
371
  engine = XGATaskEngine(tool_box=tool_box,
386
372
  general_tools=[],
387
373
  custom_tools=["*"],
388
374
  llm_config=LLMConfig(stream=False),
389
375
  system_prompt=system_prompt,
390
- max_auto_run=8)
376
+ max_auto_run=8,
377
+ session_id="session_1",
378
+ agent_id="agent_1",)
391
379
 
392
380
  final_result = await engine.run_task_with_final_answer(task_message={"role": "user",
393
381
  "content": "locate 10.0.0.1 fault and solution"})
@@ -0,0 +1,65 @@
+
+ from typing import Any, Dict, Optional
+ from langfuse import Langfuse
+
+ from xgae.utils.setup_env import setup_langfuse
+ from xgae.utils.llm_client import LangfuseMetadata
+ from xgae.engine.engine_base import XGATaskResult
+
+ class XGATaskLangFuse:
+     langfuse: Langfuse = None
+
+     def __init__(self,
+                  session_id: str,
+                  task_id:str,
+                  task_run_id: str,
+                  task_no: int,
+                  agent_id: str) -> None:
+         if XGATaskLangFuse.langfuse is None:
+             XGATaskLangFuse.langfuse = setup_langfuse()
+
+         self.session_id = session_id
+         self.task_id = task_id
+         self.task_run_id = task_run_id
+         self.task_no = task_no
+         self.agent_id = agent_id
+
+         self.trace_id = None
+         self.root_span = None
+         self.root_span_name = None
+
+
+     def start_root_span(self,
+                         root_span_name: str,
+                         task_message: Dict[str, Any],
+                         trace_id: Optional[str] = None):
+         if self.root_span is None:
+             trace = None
+             if trace_id:
+                 self.trace_id = trace_id
+                 trace = XGATaskLangFuse.langfuse.trace(id=trace_id)
+             else:
+                 trace = XGATaskLangFuse.langfuse.trace(name="xga_task_engine")
+                 self.trace_id = trace.id
+
+             metadata = {"task_id": self.task_id, "session_id": self.session_id, "agent_id": self.agent_id}
+             self.root_span = trace.span(id=self.task_run_id, name=root_span_name, input=task_message,metadata=metadata)
+             self.root_span_name = root_span_name
+
+
+     def end_root_span(self, root_span_name:str, output: Optional[XGATaskResult]=None):
+         if self.root_span and self.root_span_name == root_span_name:
+             self.root_span.end(output=output)
+             self.root_span = None
+             self.root_span_name = None
+
+
+     def create_llm_langfuse_meta(self, llm_count:int)-> LangfuseMetadata:
+         generation_name = f"xga_task_engine_llm_completion[{self.task_no}]({llm_count})"
+         generation_id = f"{self.task_run_id}({llm_count})"
+         return LangfuseMetadata(
+             generation_name=generation_name,
+             generation_id=generation_id,
+             existing_trace_id=self.trace_id,
+             session_id=self.session_id,
+         )
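
A minimal sketch of how this new helper appears to be used by XGATaskEngine, based only on the calls visible in this diff; the constructor arguments and the task_message value here are illustrative:

from xgae.engine.task_langfuse import XGATaskLangFuse

task_lf = XGATaskLangFuse("session_1", "task1", "task1[0]", 0, "agent_1")
task_lf.start_root_span("run_task", {"role": "user", "content": "hello"})  # opens the root span, creating a trace if no trace_id is passed
meta = task_lf.create_llm_langfuse_meta(0)                                 # per-LLM-call metadata handed to LLMClient.create_completion
task_lf.end_root_span("run_task")                                          # closes the span opened under the same name
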
@@ -41,7 +41,7 @@ async def end_task(task_id: str) :


  def main():
-     #print("="*20 + " XGAE Message Tools Sever Started in Stdio mode " + "="*20)
+     print("="*20 + " XGAE Message Tools Sever Started in Stdio mode " + "="*20)
      mcp.run(transport="stdio")

  if __name__ == "__main__":
xgae/utils/__init__.py CHANGED
@@ -7,3 +7,9 @@ def handle_error(e: Exception) -> None:
      logging.error("Traceback details:\n%s", traceback.format_exc())
      raise (e) from e

+
+ def to_bool(value: str) -> bool:
+     if value is None:
+         return False
+
+     return value.lower() == "true"
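
The new to_bool helper backs the LOG_ENABLE and LLM_LANGFUSE_ENABLE checks elsewhere in this release; a minimal sketch of its behaviour (example values are illustrative):

import os
from xgae.utils import to_bool

to_bool("true")    # True
to_bool("False")   # False — only the string "true", case-insensitively, maps to True
to_bool(None)      # False
to_bool(os.getenv("LOG_ENABLE", "true"))  # typical call-site pattern seen in setup_env.py
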
@@ -26,8 +26,7 @@ def ensure_dict(value: Union[str, Dict[str, Any], None], default: Dict[str, Any]
      Returns:
          A dictionary
      """
-     if default is None:
-         default = {}
+     default = default or {}

      if value is None:
          return default
@@ -64,8 +63,7 @@ def ensure_list(value: Union[str, List[Any], None], default: List[Any] = None) -
      Returns:
          A list
      """
-     if default is None:
-         default = []
+     default = default or []

      if value is None:
          return default
@@ -84,7 +82,7 @@ def ensure_list(value: Union[str, List[Any], None], default: List[Any] = None) -

      return default

-
+ # @todo if all call value is str, delete useless code
  def safe_json_parse(value: Union[str, Dict, List, Any], default: Any = None) -> Any:
      """
      Safely parse a value that might be JSON string or already parsed.
@@ -105,16 +103,13 @@ def safe_json_parse(value: Union[str, Dict, List, Any], default: Any = None) ->
      # If it's already a dict or list, return as-is
      if isinstance(value, (dict, list)):
          return value
-
-     # If it's a string, try to parse it
+
      if isinstance(value, str):
          try:
              return json.loads(value)
          except (json.JSONDecodeError, TypeError):
-             # If it's not valid JSON, return the string itself
              return value
-
-     # For any other type, return as-is
+
      return value


@@ -137,9 +132,8 @@ def to_json_string(value: Any) -> str:
              json.loads(value)
              return value # It's already a JSON string
          except (json.JSONDecodeError, TypeError):
-             # It's a plain string, encode it as JSON
-             return json.dumps(value)
-
+             pass
+
      # For all other types, convert to JSON
      return json.dumps(value)

xgae/utils/llm_client.py CHANGED
@@ -1,15 +1,15 @@
  import asyncio
  import json
- import logging
  import os
+ import logging
  import litellm

  from typing import Union, Dict, Any, Optional, List, TypedDict
-
- from litellm.utils import ModelResponse, CustomStreamWrapper
  from openai import OpenAIError
+ from litellm.utils import ModelResponse, CustomStreamWrapper

- from xgae.utils.setup_env import langfuse
+ from xgae.utils import to_bool
+ from xgae.utils.setup_env import setup_langfuse

  class LLMConfig(TypedDict, total=False):
      model: str # Optional Name of the model to use , Override .env LLM_MODEL
@@ -47,6 +47,7 @@ class LLMClient:
      def __init__(self, llm_config: LLMConfig=None):
          litellm.modify_params = True
          litellm.drop_params = True
+
          self._init_langfuse()

          llm_config = llm_config or LLMConfig()
@@ -79,22 +80,27 @@ class LLMClient:
          self.is_stream = llm_config_params['stream']

          self.lite_llm_params = self._prepare_llm_params(llm_config_params)
-         logging.info(f"📡 LLMClient initialed : model={self.model_name}, is_stream={self.is_stream}, enable thinking={self.lite_llm_params['enable_thinking']}")
+         logging.info(f"=== LLMClient initialed : model={self.model_name}, is_stream={self.is_stream}, enable thinking={self.lite_llm_params['enable_thinking']}")

      @staticmethod
      def _init_langfuse():
          if not LLMClient.langfuse_inited:
              LLMClient.langfuse_inited =True
-             if langfuse and langfuse.enabled:
-                 litellm.success_callback = ["langfuse"]
-                 litellm.failure_callback = ["langfuse"]
-                 LLMClient.langfuse_enabled = True
-                 logging.info("=== LiteLLM Langfuse is enable !")
+
+             env_llm_langfuse_enable = to_bool(os.getenv("LLM_LANGFUSE_ENABLE", False))
+             if env_llm_langfuse_enable:
+                 env_langfuse = setup_langfuse()
+                 if env_langfuse and env_langfuse.enabled:
+                     litellm.success_callback = ["langfuse"]
+                     litellm.failure_callback = ["langfuse"]
+                     LLMClient.langfuse_enabled = True
+                     logging.info("🛠️ LiteLLM Langfuse is enable !")
+                 else:
+                     LLMClient.langfuse_enabled = False
+                     logging.warning("🛠️ LiteLLM Langfuse is disable, langfuse.enabled=false !")
              else:
                  LLMClient.langfuse_enabled = False
-                 logging.warning("*** LiteLLM Langfuse is disable !")
-
-
+                 logging.warning("🛠️ LiteLLM Langfuse is disable, LLM_LANGFUSE_ENABLE=False !")

      def _prepare_llm_params(self, llm_config_params: Dict[str, Any]) -> Dict[str, Any]:
          prepared_llm_params = llm_config_params.copy()
@@ -240,12 +246,26 @@ class LLMClient:
          raise LLMError(f"LLM completion failed after {self.max_retries} attempts !")

  if __name__ == "__main__":
-     async def llm_completion():
+     from xgae.utils.setup_env import setup_logging
+
+     setup_logging()
+     langfuse = setup_langfuse()
+
+     async def main():
          llm_client = LLMClient(LLMConfig(stream=False))
+
          messages = [{"role": "user", "content": "1+1="}]
          trace_id = langfuse.trace(name = "xgae_litellm_test").trace_id
+         await asyncio.sleep(1)
+
+         meta = LangfuseMetadata(
+             generation_name="llm_completion_test",
+             generation_id="generation_id_0",
+             existing_trace_id=trace_id,
+             session_id="session_0",
+         )

-         response = await llm_client.create_completion(messages, trace_id)
+         response = await llm_client.create_completion(messages, meta)
          if llm_client.is_stream:
              async for chunk in response:
                  choices = chunk.get("choices", [{}])
@@ -258,7 +278,6 @@ if __name__ == "__main__":
          else:
              print(response.choices[0].message.content)

-
-     asyncio.run(llm_completion())
+     asyncio.run(main())


xgae/utils/misc.py CHANGED
@@ -4,6 +4,8 @@ import sys

  from typing import Any, Dict

+ from xgae.utils import handle_error
+
  def read_file(file_path: str) -> str:
      if not os.path.exists(file_path):
          logging.error(f"File '{file_path}' not found")
@@ -31,4 +33,4 @@ def format_file_with_args(file_content:str, args: Dict[str, Any])-> str:
      finally:
          sys.stdout = original_stdout

-     return formated
+     return formated
xgae/utils/setup_env.py CHANGED
@@ -4,24 +4,14 @@ import os
  from dotenv import load_dotenv
  from langfuse import Langfuse

- load_dotenv()
+ from xgae.utils import to_bool

- def setup_logging() :
-     log_enable = bool(os.getenv("LOG_ENABLE", True))
-     if not log_enable :
-         return
+ load_dotenv()

+ def setup_logging(log_file: str=None, log_level: str="INFO") :
      import colorlog

-     env_log_level = os.getenv("LOG_LEVEL", "INFO")
-     log_file = os.getenv("LOG_FILE", "log/xga.log")
-     log_level = getattr(logging, env_log_level.upper(), logging.INFO)
-
-     log_dir = os.path.dirname(log_file)
-     if log_dir and not os.path.exists(log_dir):
-         os.makedirs(log_dir, exist_ok=True)
-     else:
-         os.remove(log_file)
+     logging_level = getattr(logging, log_level.upper(), logging.INFO)

      logger = logging.getLogger()
      for handler in logger.handlers[:]:
@@ -40,25 +30,36 @@ def setup_logging() :
          datefmt='%Y-%m-%d %H:%M:%S'
      )

-     file_formatter = logging.Formatter(
-         '%(asctime)s -%(levelname)-8s %(message)s',
-         datefmt='%Y-%m-%d %H:%M:%S'
-     )
-
      console_handler = logging.StreamHandler()
      console_handler.setFormatter(console_formatter)
+     logger.addHandler(console_handler)

-     file_handler = logging.FileHandler(log_file, encoding='utf-8')
-     file_handler.setFormatter(file_formatter)
+     if log_file:
+         log_dir = os.path.dirname(log_file)
+         if log_dir and not os.path.exists(log_dir):
+             os.makedirs(log_dir, exist_ok=True)
+         else:
+             os.remove(log_file)

-     logger.addHandler(console_handler)
-     logger.addHandler(file_handler)
+         file_formatter = logging.Formatter(
+             '%(asctime)s -%(levelname)-8s %(message)s',
+             datefmt='%Y-%m-%d %H:%M:%S'
+         )
+
+         file_handler = logging.FileHandler(log_file, encoding='utf-8')
+         file_handler.setFormatter(file_formatter)
+         logger.addHandler(file_handler)

      logger.setLevel(logging_level)

-     logging.info(f"📡 XGAE_LOGGING is initialized, log_level={env_log_level}, log_file={log_file}")
+     logging.info(f"🛠️ XGA_LOGGING is initialized, log_level={log_level}, log_file={log_file}")

- setup_logging()
+ def setup_env_logging():
+     log_enable = to_bool(os.getenv("LOG_ENABLE", True))
+     log_level = os.getenv("LOG_LEVEL", "INFO")
+     log_file = os.getenv("LOG_FILE", "log/xga.log")
+     if log_enable :
+         setup_logging(log_file, log_level)

  def setup_langfuse() -> Langfuse:
      env_public_key = os.getenv("LANGFUSE_PUBLIC_KEY")
@@ -70,11 +71,13 @@ def setup_langfuse() -> Langfuse:
                               secret_key=env_secret_key,
                               host=env_host)

-         logging.info("📡 XGAE_LANGFUSE initialized Successfully by Key !")
+         logging.info("🛠️ XGA_LANGFUSE initialized Successfully by Key !")
      else:
          _langfuse = Langfuse(enabled=False)
-         logging.warning("📡 XGAE_LANGFUSE Not set key, Langfuse is disabled!")
+         logging.warning("🛠️ XGA_LANGFUSE Not set key, Langfuse is disabled!")

      return _langfuse

- langfuse = setup_langfuse()
+ if __name__ == "__main__":
+     langfuse = setup_langfuse()
+     logging.warning(f"langfuse is enable={langfuse.enabled}")
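
The environment-driven setup path in this release reads a handful of variables that are all named in the diff above; a minimal illustrative sketch of wiring them up before starting the engine (values are placeholders, not defaults the package requires):

import os

# Names taken from setup_env.py and llm_client.py in this diff; values are examples only.
os.environ.setdefault("LOG_ENABLE", "true")           # gate checked by setup_env_logging()
os.environ.setdefault("LOG_LEVEL", "INFO")
os.environ.setdefault("LOG_FILE", "log/xga.log")
os.environ.setdefault("LLM_LANGFUSE_ENABLE", "true")  # gate for LiteLLM's Langfuse callbacks
# LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY / LANGFUSE_HOST enable setup_langfuse()

from xgae.utils.setup_env import setup_env_logging
setup_env_logging()
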
@@ -68,16 +68,16 @@ class XMLToolParser:
          # Find function_calls blocks
          function_calls_matches = self.FUNCTION_CALLS_PATTERN.findall(content)

-         for fc_content in function_calls_matches:
+         for func_content in function_calls_matches:
              # Find all invoke blocks within this function_calls block
-             invoke_matches = self.INVOKE_PATTERN.findall(fc_content)
+             invoke_matches = self.INVOKE_PATTERN.findall(func_content)

              for function_name, invoke_content in invoke_matches:
                  try:
                      tool_call = self._parse_invoke_block(
                          function_name,
                          invoke_content,
-                         fc_content
+                         func_content
                      )
                      if tool_call:
                          tool_calls.append(tool_call)
@@ -98,17 +98,11 @@ class XMLToolParser:
              "function_name": function_name,
              "raw_parameters": {}
          }
-
-         # Extract all parameters
+
          param_matches = self.PARAMETER_PATTERN.findall(invoke_content)
-
          for param_name, param_value in param_matches:
-             # Clean up the parameter value
              param_value = param_value.strip()
-
-             # Try to parse as JSON if it looks like JSON
              parsed_value = self._parse_parameter_value(param_value)
-
              parameters[param_name] = parsed_value
              parsing_details["raw_parameters"][param_name] = param_value

@@ -161,73 +155,3 @@ class XMLToolParser:

          # Return as string
          return value
-
-
-     def format_tool_call(self, function_name: str, parameters: Dict[str, Any]) -> str:
-         """
-         Format a tool call in the XML format.
-
-         Args:
-             function_name: Name of the function to call
-             parameters: Dictionary of parameters
-
-         Returns:
-             Formatted XML string
-         """
-         lines = ['<function_calls>', '<invoke name="{}">'.format(function_name)]
-
-         for param_name, param_value in parameters.items():
-             # Convert value to string representation
-             if isinstance(param_value, (dict, list)):
-                 value_str = json.dumps(param_value)
-             elif isinstance(param_value, bool):
-                 value_str = str(param_value).lower()
-             else:
-                 value_str = str(param_value)
-
-             lines.append('<parameter name="{}">{}</parameter>'.format(
-                 param_name, value_str
-             ))
-
-         lines.extend(['</invoke>', '</function_calls>'])
-         return '\n'.join(lines)
-
-     def validate_tool_call(self, tool_call: XMLToolCall, expected_params: Optional[Dict[str, type]] = None) -> Tuple[bool, Optional[str]]:
-         """
-         Validate a tool call against expected parameters.
-
-         Args:
-             tool_call: The XMLToolCall to validate
-             expected_params: Optional dict of parameter names to expected types
-
-         Returns:
-             Tuple of (is_valid, error_message)
-         """
-         if not tool_call.function_name:
-             return False, "Function name is required"
-
-         if expected_params:
-             for param_name, expected_type in expected_params.items():
-                 if param_name not in tool_call.parameters:
-                     return False, f"Missing required parameter: {param_name}"
-
-                 param_value = tool_call.parameters[param_name]
-                 if not isinstance(param_value, expected_type):
-                     return False, f"Parameter {param_name} should be of type {expected_type.__name__}"
-
-         return True, None
-
-
- # Convenience function for quick parsing
- def parse_xml_tool_calls(content: str) -> List[XMLToolCall]:
-     """
-     Parse XML tool calls from content.
-
-     Args:
-         content: The text content potentially containing XML tool calls
-
-     Returns:
-         List of parsed XMLToolCall objects
-     """
-     parser = XMLToolParser()
-     return parser.parse_content(content)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: xgae
- Version: 0.1.8
+ Version: 0.1.10
  Summary: Extreme General Agent Engine
  Requires-Python: >=3.13
  Requires-Dist: colorlog==6.9.0
@@ -0,0 +1,21 @@
+ xgae/__init__.py,sha256=OEUd9y9AoGBd3xYerdTTpz9xl4NWkmXeq1a2eil7Qro,72
+ xgae/cli_app.py,sha256=ePis7gYYZrevEArnCQOhaN7z4C8Y5yJSOIov8z-lGBs,3157
+ xgae/engine/engine_base.py,sha256=ioywuTpDMHEmyVcd6BInoU-vR70PhQStE2MVRWoEiJg,1768
+ xgae/engine/mcp_tool_box.py,sha256=ZSCBSXRWhISwyZ1uEIbt3esjesM46g-ktv6CxvyPVDU,10030
+ xgae/engine/prompt_builder.py,sha256=X9bS7YIms6LYplCpNHeUmi74xFP5MwFXmXNqOt1Xz-Q,4356
+ xgae/engine/task_engine.py,sha256=LAo55FKmmO8Jbo5geEUYr8kFCaVigTb-Jm06XuYKYyY,19010
+ xgae/engine/task_langfuse.py,sha256=b0aJ_Di-WDcYzi0TFCvcKWxkBz7PYP2jx3N52OptQMs,2349
+ xgae/engine/responser/non_stream_responser.py,sha256=9YCCUedbotH-TPPbTh2Mv1qNVYvznHYFPgAnQB7NJSE,6510
+ xgae/engine/responser/responser_base.py,sha256=8PcsvQHP68FEhu6v3dT9hDCc_rLKs38i4txWLcJD4ck,29851
+ xgae/engine/responser/stream_responser.py,sha256=oPGtrT1nedGMjiBAwPzUlu6Z_rPWeVSODC1xQ6D8cTY,52055
+ xgae/tools/without_general_tools_app.py,sha256=FGMV6njcOKwwfitc0j_nUov0RC-eWlhO1IP8_KHz1tQ,3788
+ xgae/utils/__init__.py,sha256=_-TTNq5FanrA-jl_w3-4xp-BnRM7SLwfYQcFyvepcW0,332
+ xgae/utils/json_helpers.py,sha256=6BkqiyEF3jV3Irb4Z6-wGY2_FNaLlxE1WKlMJHHT6E0,4645
+ xgae/utils/llm_client.py,sha256=hvEDb4DBaWVQTXMjXOd6KrFwJFBcI-YXEQD4f_AhG7Q,14008
+ xgae/utils/misc.py,sha256=M8lMXYp1pHiY6Ee8ZTUG88GpOAsE5fbYoRO_hcBFUCE,953
+ xgae/utils/setup_env.py,sha256=HweQ-WAyxfV3KYjGYi-rRQAbI_SXoimduOLpQPbHfl8,2619
+ xgae/utils/xml_tool_parser.py,sha256=I9xAZC_ElwBY19PNUq-WLXe9FSIJMeAv2Xs-VlajI7Y,4782
+ xgae-0.1.10.dist-info/METADATA,sha256=SGnhZrr3DDZ600FDMep9ihERmhwFspEtUKRv1THqQsk,310
+ xgae-0.1.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ xgae-0.1.10.dist-info/entry_points.txt,sha256=vClvL_WBJyF2x3wJCz5CNJ_BJG-dWUh7h2YbAoskHsc,162
+ xgae-0.1.10.dist-info/RECORD,,
@@ -0,0 +1,4 @@
+ [console_scripts]
+ custom_fault_tools = examples.tools.custom_fault_tools_app:main
+ xgae = xgae.cli_app:main
+ xgae-tools = xgae.tools.without_general_tools_app:main
@@ -1,19 +0,0 @@
- xgae/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- xgae/engine/engine_base.py,sha256=MExJxXHkufv_a-n9tm3FZAbJyxVAwEkRC3C_WAGdu4Q,1746
- xgae/engine/mcp_tool_box.py,sha256=6bsORGK2HE-NS8vWNnbbgUQ9uGr_MI7JGO9j_FYKUKA,9903
- xgae/engine/prompt_builder.py,sha256=8_rNRJksn2QLV_K98S0x0qNeHcmxhU0kB_53IZJTGOU,4366
- xgae/engine/task_engine.py,sha256=kOntdzVtTjjakIhcBNK5vBMiBuAXPqB4R5ustY-BNfU,18931
- xgae/engine/responser/non_stream_responser.py,sha256=tXvkGHFGm2oO0IE8Bz9DFDwqemBkO92Nb6MbX1ukHeE,6352
- xgae/engine/responser/responser_base.py,sha256=FdR0yX7VfU5kW-4JmTXBfnn9AwyX1EbOAnslkC7Vcyg,29874
- xgae/engine/responser/stream_responser.py,sha256=oPGtrT1nedGMjiBAwPzUlu6Z_rPWeVSODC1xQ6D8cTY,52055
- xgae/tools/without_general_tools_app.py,sha256=QknIF4OW9xvOad8gx-F_sCBwQYXqMalnNFvYvZXkQ_I,3789
- xgae/utils/__init__.py,sha256=GPubcIs2XFPiWKnuCpevAtYEmVWKJuXlmGkmsH9qoXA,219
- xgae/utils/json_helpers.py,sha256=K1ja6GJCatrAheW9bEWAYSQbDI42__boBCZgtsv1gtk,4865
- xgae/utils/llm_client.py,sha256=Y-o26VW1MOhJYsWJ0zR4u_YXsHSEbvVPY6r90zLQJXU,13213
- xgae/utils/misc.py,sha256=EK94YesZp8AmRUqWfN-CjTxyEHPWdIIWpFNO17dzm9g,915
- xgae/utils/setup_env.py,sha256=P_p74q3nroBdTkAElfGr4QLm4fu7ZRP0R9BMGNUL010,2352
- xgae/utils/xml_tool_parser.py,sha256=EJ6BjpD4CSdmS_LqViUJ6P8H9GY2R1e4Dh8rLCR6nSE,7474
- xgae-0.1.8.dist-info/METADATA,sha256=mk0D1208wUJnbIZxyteCAjE2tlKtQixumHo0mcqD3RI,309
- xgae-0.1.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- xgae-0.1.8.dist-info/entry_points.txt,sha256=rhQ9Vksnu8nA78lPTjiJxOCZ5k6sH6s5YNMR68y7C-A,73
- xgae-0.1.8.dist-info/RECORD,,
@@ -1,2 +0,0 @@
- [console_scripts]
- xgae-tools = xgae.tools.without_general_tools_app:main
File without changes