h2ogpte 1.6.42__py3-none-any.whl → 1.6.43rc1__py3-none-any.whl

This diff compares publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
Files changed (98)
  1. h2ogpte/__init__.py +1 -1
  2. h2ogpte/cli/__init__.py +0 -0
  3. h2ogpte/cli/commands/__init__.py +0 -0
  4. h2ogpte/cli/commands/command_handlers/__init__.py +0 -0
  5. h2ogpte/cli/commands/command_handlers/agent.py +41 -0
  6. h2ogpte/cli/commands/command_handlers/chat.py +37 -0
  7. h2ogpte/cli/commands/command_handlers/clear.py +8 -0
  8. h2ogpte/cli/commands/command_handlers/collection.py +67 -0
  9. h2ogpte/cli/commands/command_handlers/config.py +113 -0
  10. h2ogpte/cli/commands/command_handlers/disconnect.py +36 -0
  11. h2ogpte/cli/commands/command_handlers/exit.py +37 -0
  12. h2ogpte/cli/commands/command_handlers/help.py +8 -0
  13. h2ogpte/cli/commands/command_handlers/history.py +29 -0
  14. h2ogpte/cli/commands/command_handlers/rag.py +146 -0
  15. h2ogpte/cli/commands/command_handlers/research_agent.py +45 -0
  16. h2ogpte/cli/commands/command_handlers/session.py +77 -0
  17. h2ogpte/cli/commands/command_handlers/status.py +33 -0
  18. h2ogpte/cli/commands/dispatcher.py +79 -0
  19. h2ogpte/cli/core/__init__.py +0 -0
  20. h2ogpte/cli/core/app.py +105 -0
  21. h2ogpte/cli/core/config.py +199 -0
  22. h2ogpte/cli/core/encryption.py +104 -0
  23. h2ogpte/cli/core/session.py +171 -0
  24. h2ogpte/cli/integrations/__init__.py +0 -0
  25. h2ogpte/cli/integrations/agent.py +338 -0
  26. h2ogpte/cli/integrations/rag.py +442 -0
  27. h2ogpte/cli/main.py +90 -0
  28. h2ogpte/cli/ui/__init__.py +0 -0
  29. h2ogpte/cli/ui/hbot_prompt.py +435 -0
  30. h2ogpte/cli/ui/prompts.py +129 -0
  31. h2ogpte/cli/ui/status_bar.py +133 -0
  32. h2ogpte/cli/utils/__init__.py +0 -0
  33. h2ogpte/cli/utils/file_manager.py +411 -0
  34. h2ogpte/h2ogpte.py +471 -67
  35. h2ogpte/h2ogpte_async.py +482 -68
  36. h2ogpte/h2ogpte_sync_base.py +8 -1
  37. h2ogpte/rest_async/__init__.py +6 -3
  38. h2ogpte/rest_async/api/chat_api.py +29 -0
  39. h2ogpte/rest_async/api/collections_api.py +293 -0
  40. h2ogpte/rest_async/api/extractors_api.py +2874 -70
  41. h2ogpte/rest_async/api/prompt_templates_api.py +32 -32
  42. h2ogpte/rest_async/api_client.py +1 -1
  43. h2ogpte/rest_async/configuration.py +1 -1
  44. h2ogpte/rest_async/models/__init__.py +5 -2
  45. h2ogpte/rest_async/models/chat_completion.py +4 -2
  46. h2ogpte/rest_async/models/chat_completion_delta.py +5 -3
  47. h2ogpte/rest_async/models/chat_completion_request.py +1 -1
  48. h2ogpte/rest_async/models/chat_session.py +4 -2
  49. h2ogpte/rest_async/models/chat_settings.py +1 -1
  50. h2ogpte/rest_async/models/collection.py +4 -2
  51. h2ogpte/rest_async/models/collection_create_request.py +4 -2
  52. h2ogpte/rest_async/models/create_chat_session_request.py +87 -0
  53. h2ogpte/rest_async/models/extraction_request.py +1 -1
  54. h2ogpte/rest_async/models/extractor.py +4 -2
  55. h2ogpte/rest_async/models/guardrails_settings.py +8 -4
  56. h2ogpte/rest_async/models/guardrails_settings_create_request.py +1 -1
  57. h2ogpte/rest_async/models/process_document_job_request.py +1 -1
  58. h2ogpte/rest_async/models/question_request.py +1 -1
  59. h2ogpte/rest_async/models/{reset_and_share_prompt_template_request.py → reset_and_share_request.py} +6 -6
  60. h2ogpte/{rest_sync/models/reset_and_share_prompt_template_with_groups_request.py → rest_async/models/reset_and_share_with_groups_request.py} +6 -6
  61. h2ogpte/rest_async/models/summarize_request.py +1 -1
  62. h2ogpte/rest_async/models/update_collection_workspace_request.py +87 -0
  63. h2ogpte/rest_async/models/update_extractor_privacy_request.py +87 -0
  64. h2ogpte/rest_sync/__init__.py +6 -3
  65. h2ogpte/rest_sync/api/chat_api.py +29 -0
  66. h2ogpte/rest_sync/api/collections_api.py +293 -0
  67. h2ogpte/rest_sync/api/extractors_api.py +2874 -70
  68. h2ogpte/rest_sync/api/prompt_templates_api.py +32 -32
  69. h2ogpte/rest_sync/api_client.py +1 -1
  70. h2ogpte/rest_sync/configuration.py +1 -1
  71. h2ogpte/rest_sync/models/__init__.py +5 -2
  72. h2ogpte/rest_sync/models/chat_completion.py +4 -2
  73. h2ogpte/rest_sync/models/chat_completion_delta.py +5 -3
  74. h2ogpte/rest_sync/models/chat_completion_request.py +1 -1
  75. h2ogpte/rest_sync/models/chat_session.py +4 -2
  76. h2ogpte/rest_sync/models/chat_settings.py +1 -1
  77. h2ogpte/rest_sync/models/collection.py +4 -2
  78. h2ogpte/rest_sync/models/collection_create_request.py +4 -2
  79. h2ogpte/rest_sync/models/create_chat_session_request.py +87 -0
  80. h2ogpte/rest_sync/models/extraction_request.py +1 -1
  81. h2ogpte/rest_sync/models/extractor.py +4 -2
  82. h2ogpte/rest_sync/models/guardrails_settings.py +8 -4
  83. h2ogpte/rest_sync/models/guardrails_settings_create_request.py +1 -1
  84. h2ogpte/rest_sync/models/process_document_job_request.py +1 -1
  85. h2ogpte/rest_sync/models/question_request.py +1 -1
  86. h2ogpte/rest_sync/models/{reset_and_share_prompt_template_request.py → reset_and_share_request.py} +6 -6
  87. h2ogpte/{rest_async/models/reset_and_share_prompt_template_with_groups_request.py → rest_sync/models/reset_and_share_with_groups_request.py} +6 -6
  88. h2ogpte/rest_sync/models/summarize_request.py +1 -1
  89. h2ogpte/rest_sync/models/update_collection_workspace_request.py +87 -0
  90. h2ogpte/rest_sync/models/update_extractor_privacy_request.py +87 -0
  91. h2ogpte/session.py +3 -2
  92. h2ogpte/session_async.py +22 -6
  93. h2ogpte/types.py +6 -0
  94. {h2ogpte-1.6.42.dist-info → h2ogpte-1.6.43rc1.dist-info}/METADATA +5 -1
  95. {h2ogpte-1.6.42.dist-info → h2ogpte-1.6.43rc1.dist-info}/RECORD +98 -59
  96. h2ogpte-1.6.43rc1.dist-info/entry_points.txt +2 -0
  97. {h2ogpte-1.6.42.dist-info → h2ogpte-1.6.43rc1.dist-info}/WHEEL +0 -0
  98. {h2ogpte-1.6.42.dist-info → h2ogpte-1.6.43rc1.dist-info}/top_level.txt +0 -0
h2ogpte/cli/integrations/agent.py
@@ -0,0 +1,338 @@
+import asyncio
+import httpx
+import json
+from typing import Dict, Any, Optional, List, AsyncGenerator
+from datetime import datetime
+from rich.console import Console
+from rich.panel import Panel
+from rich.markdown import Markdown
+from rich.live import Live
+from rich.spinner import Spinner
+from rich.text import Text
+
+console = Console()
+
+
+class AgentClient:
+    """Client for interacting with AI agent systems."""
+
+    def __init__(self, endpoint: str, api_key: str, model: str = "gpt-4"):
+        self.endpoint = endpoint.rstrip("/")
+        self.api_key = api_key
+        self.model = model
+        self.client = httpx.AsyncClient(
+            headers={
+                "Authorization": f"Bearer {api_key}",
+                "Content-Type": "application/json",
+            },
+            timeout=httpx.Timeout(300.0),
+        )
+        self.conversation_history: List[Dict[str, str]] = []
+
+    async def test_connection(self) -> bool:
+        """Test connection to agent system."""
+        try:
+            response = await self.client.get(f"{self.endpoint}/models")
+            return response.status_code == 200
+        except Exception as e:
+            console.print(f"[red]Connection failed: {e}[/red]")
+            return False
+
+    async def send_message(
+        self,
+        message: str,
+        context: Optional[Dict[str, Any]] = None,
+        stream: bool = True,
+    ) -> AsyncGenerator[str, None]:
+        """Send a message to the agent and stream the response."""
+        # Add to conversation history
+        self.conversation_history.append({"role": "user", "content": message})
+
+        # Prepare request
+        request_data = {
+            "model": self.model,
+            "messages": self.conversation_history,
+            "stream": stream,
+            "temperature": 0.7,
+            "max_tokens": 2000,
+        }
+
+        if context:
+            request_data["context"] = context
+
+        try:
+            if stream:
+                async with self.client.stream(
+                    "POST", f"{self.endpoint}/chat/completions", json=request_data
+                ) as response:
+                    response.raise_for_status()
+                    full_response = ""
+
+                    async for line in response.aiter_lines():
+                        if line.startswith("data: "):
+                            data = line[6:]
+                            if data == "[DONE]":
+                                break
+
+                            try:
+                                chunk = json.loads(data)
+                                if "choices" in chunk and chunk["choices"]:
+                                    content = (
+                                        chunk["choices"][0]
+                                        .get("delta", {})
+                                        .get("content", "")
+                                    )
+                                    if content:
+                                        full_response += content
+                                        yield content
+                            except json.JSONDecodeError:
+                                continue
+
+                    # Add assistant response to history
+                    self.conversation_history.append(
+                        {"role": "assistant", "content": full_response}
+                    )
+
+            else:
+                response = await self.client.post(
+                    f"{self.endpoint}/chat/completions", json=request_data
+                )
+                response.raise_for_status()
+                data = response.json()
+
+                if "choices" in data and data["choices"]:
+                    content = data["choices"][0]["message"]["content"]
+                    self.conversation_history.append(
+                        {"role": "assistant", "content": content}
+                    )
+                    yield content
+
+        except httpx.HTTPError as e:
+            yield f"[ERROR] HTTP error occurred: {e}"
+        except Exception as e:
+            yield f"[ERROR] An error occurred: {e}"
+
+    async def execute_action(
+        self, action: str, parameters: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Execute a specific action through the agent."""
+        request_data = {"action": action, "parameters": parameters, "model": self.model}
+
+        try:
+            response = await self.client.post(
+                f"{self.endpoint}/actions", json=request_data
+            )
+            response.raise_for_status()
+            return response.json()
+        except Exception as e:
+            return {"error": str(e), "success": False}
+
+    def clear_history(self):
+        """Clear conversation history."""
+        self.conversation_history = []
+
+    def get_history(self) -> List[Dict[str, str]]:
+        """Get conversation history."""
+        return self.conversation_history.copy()
+
+    async def close(self):
+        """Close the client connection."""
+        await self.client.aclose()
+
+
+class AgentSession:
+    """Manages an interactive agent session."""
+
+    def __init__(self, client: AgentClient):
+        self.client = client
+        self.is_active = False
+        self.current_task: Optional[asyncio.Task] = None
+        self.interrupted = False
+
+    async def start_conversation(self, initial_prompt: Optional[str] = None):
+        """Start an interactive conversation with the agent."""
+        self.is_active = True
+        console.print(
+            Panel(
+                "[bold cyan]Agent Session Started[/bold cyan]\n"
+                "[dim]Type your message and press Enter. Use /end to exit session.[/dim]",
+                border_style="cyan",
+            )
+        )
+
+        if initial_prompt:
+            await self.send_and_display(initial_prompt)
+
+    async def send_and_display(self, message: str):
+        """Send message and display streaming response."""
+        # Display user message
+        console.print(f"\n[bold blue]You:[/bold blue] {message}")
+
+        # Create a live display for streaming response
+        response_text = Text()
+        panel = Panel(
+            response_text,
+            title="[bold green]Agent[/bold green]",
+            border_style="green",
+            expand=False,
+        )
+
+        with Live(panel, refresh_per_second=10, console=console) as live:
+            try:
+                async for chunk in self.client.send_message(message):
+                    if self.interrupted:
+                        break
+                    response_text.append(chunk)
+                    live.update(
+                        Panel(
+                            response_text,
+                            title="[bold green]Agent[/bold green]",
+                            border_style="green",
+                            expand=False,
+                        )
+                    )
+            except Exception as e:
+                console.print(f"[red]Error: {e}[/red]")
+
+    def interrupt(self):
+        """Interrupt the current operation."""
+        self.interrupted = True
+        if self.current_task and not self.current_task.done():
+            self.current_task.cancel()
+
+    async def end_session(self):
+        """End the agent session."""
+        self.is_active = False
+        console.print(
+            Panel(
+                "[bold yellow]Agent Session Ended[/bold yellow]", border_style="yellow"
+            )
+        )
+
+
+class AgentManager:
+    """Manager for agent operations with UI integration."""
+
+    def __init__(self):
+        self.client: Optional[AgentClient] = None
+        self.session: Optional[AgentSession] = None
+        self.connected = False
+
+    async def connect(self, endpoint: str, api_key: str, model: str = "gpt-4") -> bool:
+        """Connect to agent system."""
+        console.print(f"[blue]Connecting to agent system at {endpoint}...[/blue]")
+
+        self.client = AgentClient(endpoint, api_key, model)
+
+        if await self.client.test_connection():
+            self.connected = True
+            console.print(
+                f"[green]✓[/green] Connected to agent system (model: {model})"
+            )
+            return True
+        else:
+            self.connected = False
+            console.print("[red]✗[/red] Failed to connect to agent system")
+            return False
+
+    async def start_session(self, initial_prompt: Optional[str] = None):
+        """Start an interactive agent session."""
+        if not self.connected or not self.client:
+            console.print(
+                "[red]Not connected to agent system. Configure in settings first.[/red]"
+            )
+            return
+
+        self.session = AgentSession(self.client)
+        await self.session.start_conversation(initial_prompt)
+
+    async def send_message(self, message: str) -> str:
+        """Send a single message to the agent."""
+        if not self.connected or not self.client:
+            console.print("[red]Not connected to agent system.[/red]")
+            return ""
+
+        response = ""
+        async for chunk in self.client.send_message(message, stream=False):
+            response += chunk
+
+        return response
+
+    async def execute_action(self, action: str, parameters: Dict[str, Any]):
+        """Execute an action through the agent."""
+        if not self.connected or not self.client:
+            console.print("[red]Not connected to agent system.[/red]")
+            return None
+
+        console.print(f"[blue]Executing action: {action}[/blue]")
+
+        with console.status("[bold green]Processing..."):
+            result = await self.client.execute_action(action, parameters)

+        if result.get("success"):
+            console.print("[green]✓[/green] Action completed successfully")
+            self._display_action_result(result)
+        else:
+            console.print(
+                f"[red]✗[/red] Action failed: {result.get('error', 'Unknown error')}"
+            )
+
+        return result
+
+    def _display_action_result(self, result: Dict[str, Any]):
+        """Display action result."""
+        if "output" in result:
+            console.print(
+                Panel(str(result["output"]), title="Output", border_style="green")
+            )
+
+        if "metadata" in result:
+            from rich.table import Table
+
+            table = Table(title="Metadata", show_header=False)
+            table.add_column("Key", style="cyan")
+            table.add_column("Value", style="white")
+
+            for key, value in result["metadata"].items():
+                table.add_row(key, str(value))
+
+            console.print(table)
+
+    def show_history(self):
+        """Display conversation history."""
+        if not self.client:
+            console.print("[yellow]No active session[/yellow]")
+            return
+
+        history = self.client.get_history()
+        if not history:
+            console.print("[yellow]No conversation history[/yellow]")
+            return
+
+        for entry in history:
+            role = entry["role"]
+            content = (
+                entry["content"][:200] + "..."
+                if len(entry["content"]) > 200
+                else entry["content"]
+            )
+
+            if role == "user":
+                console.print(f"[bold blue]User:[/bold blue] {content}")
+            else:
+                console.print(f"[bold green]Agent:[/bold green] {content}")
+
+    def clear_history(self):
+        """Clear conversation history."""
+        if self.client:
+            self.client.clear_history()
+            console.print("[green]✓[/green] Conversation history cleared")
+
+    async def close(self):
+        """Close agent connection."""
+        if self.session:
+            await self.session.end_session()
+
+        if self.client:
+            await self.client.close()
+            self.connected = False
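
For context only (not part of the package diff): a minimal sketch of how the new AgentManager added in h2ogpte/cli/integrations/agent.py might be driven, assuming an OpenAI-compatible agent endpoint. The endpoint URL, API key, and prompt below are illustrative placeholders, not values shipped with the wheel.

```python
# Illustrative usage sketch of AgentManager; endpoint, key, and prompt are placeholders.
import asyncio

from h2ogpte.cli.integrations.agent import AgentManager


async def main() -> None:
    manager = AgentManager()
    # connect() builds an AgentClient and probes GET <endpoint>/models before reporting success.
    if await manager.connect("https://agent.example.com/v1", "sk-placeholder", model="gpt-4"):
        # send_message() consumes AgentClient.send_message(stream=False) and returns the joined reply.
        reply = await manager.send_message("Summarize the latest release notes.")
        print(reply)
        manager.show_history()
    await manager.close()


if __name__ == "__main__":
    asyncio.run(main())
```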