praisonaiagents 0.0.17__tar.gz → 0.0.19__tar.gz
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/PKG-INFO +1 -1
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/__init__.py +4 -0
- praisonaiagents-0.0.19/praisonaiagents/main.py +353 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents.egg-info/PKG-INFO +1 -1
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/pyproject.toml +1 -1
- praisonaiagents-0.0.17/praisonaiagents/main.py +0 -161
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/agent/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/agent/agent.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/agents/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/agents/agents.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/agent/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/agent/agent.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/agents/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/agents/agents.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/main.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/task/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/build/lib/praisonaiagents/task/task.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/process/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/process/process.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/task/__init__.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/task/task.py +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents.egg-info/SOURCES.txt +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents.egg-info/dependency_links.txt +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents.egg-info/requires.txt +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents.egg-info/top_level.txt +0 -0
- {praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/setup.cfg +0 -0
{praisonaiagents-0.0.17 → praisonaiagents-0.0.19}/praisonaiagents/__init__.py
@@ -16,6 +16,8 @@ from .main import (
     display_generating,
     clean_triple_backticks,
     error_logs,
+    register_display_callback,
+    display_callbacks,
 )
 
 __all__ = [
@@ -32,4 +34,6 @@ __all__ = [
     'display_generating',
     'clean_triple_backticks',
     'error_logs',
+    'register_display_callback',
+    'display_callbacks',
 ]
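The two new exports wire a callback hook into the display helpers defined in main.py: register_display_callback(display_type, callback_fn) stores the function in the display_callbacks registry, and each display_* helper invokes the matching entry (keys such as 'interaction', 'tool_call', 'error', 'generating') before rendering its Rich panel. A minimal sketch of the synchronous path, assuming OPENAI_API_KEY is set (main.py constructs an OpenAI client at import time); the log-file sink and its name are illustrative, not part of the package:

from praisonaiagents import register_display_callback
from praisonaiagents.main import display_interaction

# Illustrative sink: append every displayed interaction to a local file.
def log_interaction(message, response, markdown=None, generation_time=None):
    with open("interactions.log", "a") as f:
        f.write(f"{message!r} -> {response!r} ({generation_time}s)\n")

register_display_callback("interaction", log_interaction)

# The callback fires before the Message/Response panels are printed.
display_interaction("What is 2 + 2?", "4", generation_time=0.3)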
praisonaiagents-0.0.19/praisonaiagents/main.py (added)
@@ -0,0 +1,353 @@
+import os
+import time
+import json
+import logging
+from typing import List, Optional, Dict, Any, Union, Literal, Type
+from openai import OpenAI
+from pydantic import BaseModel
+from rich import print
+from rich.console import Console
+from rich.panel import Panel
+from rich.text import Text
+from rich.markdown import Markdown
+from rich.logging import RichHandler
+from rich.live import Live
+import asyncio
+
+LOGLEVEL = os.environ.get('LOGLEVEL', 'INFO').upper()
+
+logging.basicConfig(
+    level=getattr(logging, LOGLEVEL, logging.INFO),
+    format="%(asctime)s %(filename)s:%(lineno)d %(levelname)s %(message)s",
+    datefmt="[%X]",
+    handlers=[RichHandler(rich_tracebacks=True)]
+)
+
+# Global list to store error logs
+error_logs = []
+
+# Global callback registry
+display_callbacks = {}
+
+def register_display_callback(display_type: str, callback_fn):
+    """Register a callback function for a specific display type."""
+    display_callbacks[display_type] = callback_fn
+
+def _clean_display_content(content: str, max_length: int = 20000) -> str:
+    """Helper function to clean and truncate content for display."""
+    if not content or not str(content).strip():
+        return ""
+
+    content = str(content)
+    # Handle base64 content
+    if "base64" in content:
+        content_parts = []
+        for line in content.split('\n'):
+            if "base64" not in line:
+                content_parts.append(line)
+        content = '\n'.join(content_parts)
+
+    # Truncate if too long
+    if len(content) > max_length:
+        content = content[:max_length] + "..."
+
+    return content.strip()
+
+def display_interaction(message, response, markdown=True, generation_time=None, console=None):
+    """Display the interaction between user and assistant."""
+    if console is None:
+        console = Console()
+
+    # Handle multimodal content (list)
+    if isinstance(message, list):
+        text_content = next((item["text"] for item in message if item["type"] == "text"), "")
+        message = text_content
+
+    message = _clean_display_content(str(message))
+    response = _clean_display_content(str(response))
+
+    # Execute callback if registered
+    if 'interaction' in display_callbacks:
+        display_callbacks['interaction'](
+            message=message,
+            response=response,
+            markdown=markdown,
+            generation_time=generation_time
+        )
+
+    # Existing display logic...
+    if generation_time:
+        console.print(Text(f"Response generated in {generation_time:.1f}s", style="dim"))
+
+    if markdown:
+        console.print(Panel.fit(Markdown(message), title="Message", border_style="cyan"))
+        console.print(Panel.fit(Markdown(response), title="Response", border_style="cyan"))
+    else:
+        console.print(Panel.fit(Text(message, style="bold green"), title="Message", border_style="cyan"))
+        console.print(Panel.fit(Text(response, style="bold blue"), title="Response", border_style="cyan"))
+
+def display_self_reflection(message: str, console=None):
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    # Execute callback if registered
+    if 'self_reflection' in display_callbacks:
+        display_callbacks['self_reflection'](message=message)
+
+    console.print(Panel.fit(Text(message, style="bold yellow"), title="Self Reflection", border_style="magenta"))
+
+def display_instruction(message: str, console=None):
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    # Execute callback if registered
+    if 'instruction' in display_callbacks:
+        display_callbacks['instruction'](message=message)
+
+    console.print(Panel.fit(Text(message, style="bold blue"), title="Instruction", border_style="cyan"))
+
+def display_tool_call(message: str, console=None):
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    # Execute callback if registered
+    if 'tool_call' in display_callbacks:
+        display_callbacks['tool_call'](message=message)
+
+    console.print(Panel.fit(Text(message, style="bold cyan"), title="Tool Call", border_style="green"))
+
+def display_error(message: str, console=None):
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    # Execute callback if registered
+    if 'error' in display_callbacks:
+        display_callbacks['error'](message=message)
+
+    console.print(Panel.fit(Text(message, style="bold red"), title="Error", border_style="red"))
+    error_logs.append(message)
+
+def display_generating(content: str = "", start_time: Optional[float] = None):
+    if not content or not str(content).strip():
+        return Panel("", title="", border_style="green")
+
+    elapsed_str = ""
+    if start_time is not None:
+        elapsed = time.time() - start_time
+        elapsed_str = f" {elapsed:.1f}s"
+
+    content = _clean_display_content(str(content))
+
+    # Execute callback if registered
+    if 'generating' in display_callbacks:
+        display_callbacks['generating'](
+            content=content,
+            elapsed_time=elapsed_str.strip() if elapsed_str else None
+        )
+
+    return Panel(Markdown(content), title=f"Generating...{elapsed_str}", border_style="green")
+
+# Async versions with 'a' prefix
+async def adisplay_interaction(message, response, markdown=True, generation_time=None, console=None):
+    """Async version of display_interaction."""
+    if console is None:
+        console = Console()
+
+    if isinstance(message, list):
+        text_content = next((item["text"] for item in message if item["type"] == "text"), "")
+        message = text_content
+
+    message = _clean_display_content(str(message))
+    response = _clean_display_content(str(response))
+
+    if 'interaction' in display_callbacks:
+        callback = display_callbacks['interaction']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(
+                message=message,
+                response=response,
+                markdown=markdown,
+                generation_time=generation_time
+            )
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(
+                None,
+                callback,
+                message,
+                response,
+                markdown,
+                generation_time
+            )
+
+    if generation_time:
+        console.print(Text(f"Response generated in {generation_time:.1f}s", style="dim"))
+
+    if markdown:
+        console.print(Panel.fit(Markdown(message), title="Message", border_style="cyan"))
+        console.print(Panel.fit(Markdown(response), title="Response", border_style="cyan"))
+    else:
+        console.print(Panel.fit(Text(message, style="bold green"), title="Message", border_style="cyan"))
+        console.print(Panel.fit(Text(response, style="bold blue"), title="Response", border_style="cyan"))
+
+async def adisplay_self_reflection(message: str, console=None):
+    """Async version of display_self_reflection."""
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    if 'self_reflection' in display_callbacks:
+        callback = display_callbacks['self_reflection']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(message=message)
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(None, callback, message)
+
+    console.print(Panel.fit(Text(message, style="bold yellow"), title="Self Reflection", border_style="magenta"))
+
+async def adisplay_instruction(message: str, console=None):
+    """Async version of display_instruction."""
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    if 'instruction' in display_callbacks:
+        callback = display_callbacks['instruction']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(message=message)
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(None, callback, message)
+
+    console.print(Panel.fit(Text(message, style="bold blue"), title="Instruction", border_style="cyan"))
+
+async def adisplay_tool_call(message: str, console=None):
+    """Async version of display_tool_call."""
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    if 'tool_call' in display_callbacks:
+        callback = display_callbacks['tool_call']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(message=message)
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(None, callback, message)
+
+    console.print(Panel.fit(Text(message, style="bold cyan"), title="Tool Call", border_style="green"))
+
+async def adisplay_error(message: str, console=None):
+    """Async version of display_error."""
+    if not message or not message.strip():
+        return
+    if console is None:
+        console = Console()
+    message = _clean_display_content(str(message))
+
+    if 'error' in display_callbacks:
+        callback = display_callbacks['error']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(message=message)
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(None, callback, message)
+
+    console.print(Panel.fit(Text(message, style="bold red"), title="Error", border_style="red"))
+    error_logs.append(message)
+
+async def adisplay_generating(content: str = "", start_time: Optional[float] = None):
+    """Async version of display_generating."""
+    if not content or not str(content).strip():
+        return Panel("", title="", border_style="green")
+
+    elapsed_str = ""
+    if start_time is not None:
+        elapsed = time.time() - start_time
+        elapsed_str = f" {elapsed:.1f}s"
+
+    content = _clean_display_content(str(content))
+
+    if 'generating' in display_callbacks:
+        callback = display_callbacks['generating']
+        if asyncio.iscoroutinefunction(callback):
+            await callback(
+                content=content,
+                elapsed_time=elapsed_str.strip() if elapsed_str else None
+            )
+        else:
+            loop = asyncio.get_event_loop()
+            await loop.run_in_executor(
+                None,
+                callback,
+                content,
+                elapsed_str.strip() if elapsed_str else None
+            )
+
+    return Panel(Markdown(content), title=f"Generating...{elapsed_str}", border_style="green")
+
+def clean_triple_backticks(text: str) -> str:
+    """Remove triple backticks and surrounding json fences from a string."""
+    cleaned = text.strip()
+    if cleaned.startswith("```json"):
+        cleaned = cleaned[len("```json"):].strip()
+    if cleaned.startswith("```"):
+        cleaned = cleaned[len("```"):].strip()
+    if cleaned.endswith("```"):
+        cleaned = cleaned[:-3].strip()
+    return cleaned
+
+class ReflectionOutput(BaseModel):
+    reflection: str
+    satisfactory: Literal["yes", "no"]
+
+client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
+
+class TaskOutput(BaseModel):
+    description: str
+    summary: Optional[str] = None
+    raw: str
+    pydantic: Optional[BaseModel] = None
+    json_dict: Optional[Dict[str, Any]] = None
+    agent: str
+    output_format: Literal["RAW", "JSON", "Pydantic"] = "RAW"
+
+    def json(self) -> Optional[str]:
+        if self.output_format == "JSON" and self.json_dict:
+            return json.dumps(self.json_dict)
+        return None
+
+    def to_dict(self) -> dict:
+        output_dict = {}
+        if self.json_dict:
+            output_dict.update(self.json_dict)
+        if self.pydantic:
+            output_dict.update(self.pydantic.model_dump())
+        return output_dict
+
+    def __str__(self):
+        if self.pydantic:
+            return str(self.pydantic)
+        elif self.json_dict:
+            return json.dumps(self.json_dict)
+        else:
+            return self.raw
praisonaiagents-0.0.17/praisonaiagents/main.py (removed)
@@ -1,161 +0,0 @@
-import os
-import time
-import json
-import logging
-from typing import List, Optional, Dict, Any, Union, Literal, Type
-from openai import OpenAI
-from pydantic import BaseModel
-from rich import print
-from rich.console import Console
-from rich.panel import Panel
-from rich.text import Text
-from rich.markdown import Markdown
-from rich.logging import RichHandler
-from rich.live import Live
-
-LOGLEVEL = os.environ.get('LOGLEVEL', 'INFO').upper()
-
-logging.basicConfig(
-    level=getattr(logging, LOGLEVEL, logging.INFO),
-    format="%(asctime)s %(filename)s:%(lineno)d %(levelname)s %(message)s",
-    datefmt="[%X]",
-    handlers=[RichHandler(rich_tracebacks=True)]
-)
-
-# Global list to store error logs
-error_logs = []
-
-def _clean_display_content(content: str, max_length: int = 20000) -> str:
-    """Helper function to clean and truncate content for display."""
-    if not content or not str(content).strip():
-        return ""
-
-    content = str(content)
-    # Handle base64 content
-    if "base64" in content:
-        content_parts = []
-        for line in content.split('\n'):
-            if "base64" not in line:
-                content_parts.append(line)
-        content = '\n'.join(content_parts)
-
-    # Truncate if too long
-    if len(content) > max_length:
-        content = content[:max_length] + "..."
-
-    return content.strip()
-
-def display_interaction(message, response, markdown=True, generation_time=None, console=None):
-    """Display the interaction between user and assistant."""
-    if console is None:
-        console = Console()
-    if generation_time:
-        console.print(Text(f"Response generated in {generation_time:.1f}s", style="dim"))
-
-    # Handle multimodal content (list)
-    if isinstance(message, list):
-        # Extract just the text content from the multimodal message
-        text_content = next((item["text"] for item in message if item["type"] == "text"), "")
-        message = text_content
-
-    message = _clean_display_content(str(message))
-    response = _clean_display_content(str(response))
-
-    if markdown:
-        console.print(Panel.fit(Markdown(message), title="Message", border_style="cyan"))
-        console.print(Panel.fit(Markdown(response), title="Response", border_style="cyan"))
-    else:
-        console.print(Panel.fit(Text(message, style="bold green"), title="Message", border_style="cyan"))
-        console.print(Panel.fit(Text(response, style="bold blue"), title="Response", border_style="cyan"))
-
-def display_self_reflection(message: str, console=None):
-    if not message or not message.strip():
-        return
-    if console is None:
-        console = Console()
-    message = _clean_display_content(str(message))
-    console.print(Panel.fit(Text(message, style="bold yellow"), title="Self Reflection", border_style="magenta"))
-
-def display_instruction(message: str, console=None):
-    if not message or not message.strip():
-        return
-    if console is None:
-        console = Console()
-    message = _clean_display_content(str(message))
-    console.print(Panel.fit(Text(message, style="bold blue"), title="Instruction", border_style="cyan"))
-
-def display_tool_call(message: str, console=None):
-    if not message or not message.strip():
-        return
-    if console is None:
-        console = Console()
-    message = _clean_display_content(str(message))
-    console.print(Panel.fit(Text(message, style="bold cyan"), title="Tool Call", border_style="green"))
-
-def display_error(message: str, console=None):
-    if not message or not message.strip():
-        return
-    if console is None:
-        console = Console()
-    message = _clean_display_content(str(message))
-    console.print(Panel.fit(Text(message, style="bold red"), title="Error", border_style="red"))
-    # Store errors
-    error_logs.append(message)
-
-def display_generating(content: str = "", start_time: Optional[float] = None):
-    if not content or not str(content).strip():
-        return Panel("", title="", border_style="green")  # Return empty panel when no content
-    elapsed_str = ""
-    if start_time is not None:
-        elapsed = time.time() - start_time
-        elapsed_str = f" {elapsed:.1f}s"
-
-    content = _clean_display_content(str(content))
-    return Panel(Markdown(content), title=f"Generating...{elapsed_str}", border_style="green")
-
-def clean_triple_backticks(text: str) -> str:
-    """Remove triple backticks and surrounding json fences from a string."""
-    cleaned = text.strip()
-    if cleaned.startswith("```json"):
-        cleaned = cleaned[len("```json"):].strip()
-    if cleaned.startswith("```"):
-        cleaned = cleaned[len("```"):].strip()
-    if cleaned.endswith("```"):
-        cleaned = cleaned[:-3].strip()
-    return cleaned
-
-class ReflectionOutput(BaseModel):
-    reflection: str
-    satisfactory: Literal["yes", "no"]
-
-client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
-
-class TaskOutput(BaseModel):
-    description: str
-    summary: Optional[str] = None
-    raw: str
-    pydantic: Optional[BaseModel] = None
-    json_dict: Optional[Dict[str, Any]] = None
-    agent: str
-    output_format: Literal["RAW", "JSON", "Pydantic"] = "RAW"
-
-    def json(self) -> Optional[str]:
-        if self.output_format == "JSON" and self.json_dict:
-            return json.dumps(self.json_dict)
-        return None
-
-    def to_dict(self) -> dict:
-        output_dict = {}
-        if self.json_dict:
-            output_dict.update(self.json_dict)
-        if self.pydantic:
-            output_dict.update(self.pydantic.model_dump())
-        return output_dict
-
-    def __str__(self):
-        if self.pydantic:
-            return str(self.pydantic)
-        elif self.json_dict:
-            return json.dumps(self.json_dict)
-        else:
-            return self.raw
All remaining files listed above with +0 -0 were renamed from praisonaiagents-0.0.17 to praisonaiagents-0.0.19 without content changes.