genxai-framework 0.1.0 (genxai_framework-0.1.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. cli/__init__.py +3 -0
  2. cli/commands/__init__.py +6 -0
  3. cli/commands/approval.py +85 -0
  4. cli/commands/audit.py +127 -0
  5. cli/commands/metrics.py +25 -0
  6. cli/commands/tool.py +389 -0
  7. cli/main.py +32 -0
  8. genxai/__init__.py +81 -0
  9. genxai/api/__init__.py +5 -0
  10. genxai/api/app.py +21 -0
  11. genxai/config/__init__.py +5 -0
  12. genxai/config/settings.py +37 -0
  13. genxai/connectors/__init__.py +19 -0
  14. genxai/connectors/base.py +122 -0
  15. genxai/connectors/kafka.py +92 -0
  16. genxai/connectors/postgres_cdc.py +95 -0
  17. genxai/connectors/registry.py +44 -0
  18. genxai/connectors/sqs.py +94 -0
  19. genxai/connectors/webhook.py +73 -0
  20. genxai/core/__init__.py +37 -0
  21. genxai/core/agent/__init__.py +32 -0
  22. genxai/core/agent/base.py +206 -0
  23. genxai/core/agent/config_io.py +59 -0
  24. genxai/core/agent/registry.py +98 -0
  25. genxai/core/agent/runtime.py +970 -0
  26. genxai/core/communication/__init__.py +6 -0
  27. genxai/core/communication/collaboration.py +44 -0
  28. genxai/core/communication/message_bus.py +192 -0
  29. genxai/core/communication/protocols.py +35 -0
  30. genxai/core/execution/__init__.py +22 -0
  31. genxai/core/execution/metadata.py +181 -0
  32. genxai/core/execution/queue.py +201 -0
  33. genxai/core/graph/__init__.py +30 -0
  34. genxai/core/graph/checkpoints.py +77 -0
  35. genxai/core/graph/edges.py +131 -0
  36. genxai/core/graph/engine.py +813 -0
  37. genxai/core/graph/executor.py +516 -0
  38. genxai/core/graph/nodes.py +161 -0
  39. genxai/core/graph/trigger_runner.py +40 -0
  40. genxai/core/memory/__init__.py +19 -0
  41. genxai/core/memory/base.py +72 -0
  42. genxai/core/memory/embedding.py +327 -0
  43. genxai/core/memory/episodic.py +448 -0
  44. genxai/core/memory/long_term.py +467 -0
  45. genxai/core/memory/manager.py +543 -0
  46. genxai/core/memory/persistence.py +297 -0
  47. genxai/core/memory/procedural.py +461 -0
  48. genxai/core/memory/semantic.py +526 -0
  49. genxai/core/memory/shared.py +62 -0
  50. genxai/core/memory/short_term.py +303 -0
  51. genxai/core/memory/vector_store.py +508 -0
  52. genxai/core/memory/working.py +211 -0
  53. genxai/core/state/__init__.py +6 -0
  54. genxai/core/state/manager.py +293 -0
  55. genxai/core/state/schema.py +115 -0
  56. genxai/llm/__init__.py +14 -0
  57. genxai/llm/base.py +150 -0
  58. genxai/llm/factory.py +329 -0
  59. genxai/llm/providers/__init__.py +1 -0
  60. genxai/llm/providers/anthropic.py +249 -0
  61. genxai/llm/providers/cohere.py +274 -0
  62. genxai/llm/providers/google.py +334 -0
  63. genxai/llm/providers/ollama.py +147 -0
  64. genxai/llm/providers/openai.py +257 -0
  65. genxai/llm/routing.py +83 -0
  66. genxai/observability/__init__.py +6 -0
  67. genxai/observability/logging.py +327 -0
  68. genxai/observability/metrics.py +494 -0
  69. genxai/observability/tracing.py +372 -0
  70. genxai/performance/__init__.py +39 -0
  71. genxai/performance/cache.py +256 -0
  72. genxai/performance/pooling.py +289 -0
  73. genxai/security/audit.py +304 -0
  74. genxai/security/auth.py +315 -0
  75. genxai/security/cost_control.py +528 -0
  76. genxai/security/default_policies.py +44 -0
  77. genxai/security/jwt.py +142 -0
  78. genxai/security/oauth.py +226 -0
  79. genxai/security/pii.py +366 -0
  80. genxai/security/policy_engine.py +82 -0
  81. genxai/security/rate_limit.py +341 -0
  82. genxai/security/rbac.py +247 -0
  83. genxai/security/validation.py +218 -0
  84. genxai/tools/__init__.py +21 -0
  85. genxai/tools/base.py +383 -0
  86. genxai/tools/builtin/__init__.py +131 -0
  87. genxai/tools/builtin/communication/__init__.py +15 -0
  88. genxai/tools/builtin/communication/email_sender.py +159 -0
  89. genxai/tools/builtin/communication/notification_manager.py +167 -0
  90. genxai/tools/builtin/communication/slack_notifier.py +118 -0
  91. genxai/tools/builtin/communication/sms_sender.py +118 -0
  92. genxai/tools/builtin/communication/webhook_caller.py +136 -0
  93. genxai/tools/builtin/computation/__init__.py +15 -0
  94. genxai/tools/builtin/computation/calculator.py +101 -0
  95. genxai/tools/builtin/computation/code_executor.py +183 -0
  96. genxai/tools/builtin/computation/data_validator.py +259 -0
  97. genxai/tools/builtin/computation/hash_generator.py +129 -0
  98. genxai/tools/builtin/computation/regex_matcher.py +201 -0
  99. genxai/tools/builtin/data/__init__.py +15 -0
  100. genxai/tools/builtin/data/csv_processor.py +213 -0
  101. genxai/tools/builtin/data/data_transformer.py +299 -0
  102. genxai/tools/builtin/data/json_processor.py +233 -0
  103. genxai/tools/builtin/data/text_analyzer.py +288 -0
  104. genxai/tools/builtin/data/xml_processor.py +175 -0
  105. genxai/tools/builtin/database/__init__.py +15 -0
  106. genxai/tools/builtin/database/database_inspector.py +157 -0
  107. genxai/tools/builtin/database/mongodb_query.py +196 -0
  108. genxai/tools/builtin/database/redis_cache.py +167 -0
  109. genxai/tools/builtin/database/sql_query.py +145 -0
  110. genxai/tools/builtin/database/vector_search.py +163 -0
  111. genxai/tools/builtin/file/__init__.py +17 -0
  112. genxai/tools/builtin/file/directory_scanner.py +214 -0
  113. genxai/tools/builtin/file/file_compressor.py +237 -0
  114. genxai/tools/builtin/file/file_reader.py +102 -0
  115. genxai/tools/builtin/file/file_writer.py +122 -0
  116. genxai/tools/builtin/file/image_processor.py +186 -0
  117. genxai/tools/builtin/file/pdf_parser.py +144 -0
  118. genxai/tools/builtin/test/__init__.py +15 -0
  119. genxai/tools/builtin/test/async_simulator.py +62 -0
  120. genxai/tools/builtin/test/data_transformer.py +99 -0
  121. genxai/tools/builtin/test/error_generator.py +82 -0
  122. genxai/tools/builtin/test/simple_math.py +94 -0
  123. genxai/tools/builtin/test/string_processor.py +72 -0
  124. genxai/tools/builtin/web/__init__.py +15 -0
  125. genxai/tools/builtin/web/api_caller.py +161 -0
  126. genxai/tools/builtin/web/html_parser.py +330 -0
  127. genxai/tools/builtin/web/http_client.py +187 -0
  128. genxai/tools/builtin/web/url_validator.py +162 -0
  129. genxai/tools/builtin/web/web_scraper.py +170 -0
  130. genxai/tools/custom/my_test_tool_2.py +9 -0
  131. genxai/tools/dynamic.py +105 -0
  132. genxai/tools/mcp_server.py +167 -0
  133. genxai/tools/persistence/__init__.py +6 -0
  134. genxai/tools/persistence/models.py +55 -0
  135. genxai/tools/persistence/service.py +322 -0
  136. genxai/tools/registry.py +227 -0
  137. genxai/tools/security/__init__.py +11 -0
  138. genxai/tools/security/limits.py +214 -0
  139. genxai/tools/security/policy.py +20 -0
  140. genxai/tools/security/sandbox.py +248 -0
  141. genxai/tools/templates.py +435 -0
  142. genxai/triggers/__init__.py +19 -0
  143. genxai/triggers/base.py +104 -0
  144. genxai/triggers/file_watcher.py +75 -0
  145. genxai/triggers/queue.py +68 -0
  146. genxai/triggers/registry.py +82 -0
  147. genxai/triggers/schedule.py +66 -0
  148. genxai/triggers/webhook.py +68 -0
  149. genxai/utils/__init__.py +1 -0
  150. genxai/utils/tokens.py +295 -0
  151. genxai_framework-0.1.0.dist-info/METADATA +495 -0
  152. genxai_framework-0.1.0.dist-info/RECORD +156 -0
  153. genxai_framework-0.1.0.dist-info/WHEEL +5 -0
  154. genxai_framework-0.1.0.dist-info/entry_points.txt +2 -0
  155. genxai_framework-0.1.0.dist-info/licenses/LICENSE +21 -0
  156. genxai_framework-0.1.0.dist-info/top_level.txt +2 -0

genxai/core/memory/episodic.py (new file)
@@ -0,0 +1,448 @@
"""Episodic memory implementation for storing agent experiences."""

from typing import Any, Dict, List, Optional
from datetime import datetime
import logging
import uuid

from genxai.core.memory.base import Memory, MemoryType
from genxai.core.memory.persistence import (
    JsonMemoryStore,
    MemoryPersistenceConfig,
    SqliteMemoryStore,
    create_memory_store,
)

logger = logging.getLogger(__name__)


class Episode:
    """Represents a single episode in an agent's experience."""

    def __init__(
        self,
        id: str,
        agent_id: str,
        task: str,
        actions: List[Dict[str, Any]],
        outcome: Dict[str, Any],
        timestamp: datetime,
        duration: float,
        success: bool,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Initialize episode.

        Args:
            id: Unique episode ID
            agent_id: ID of the agent
            task: Task description
            actions: List of actions taken
            outcome: Final outcome
            timestamp: When episode occurred
            duration: Duration in seconds
            success: Whether episode was successful
            metadata: Additional metadata
        """
        self.id = id
        self.agent_id = agent_id
        self.task = task
        self.actions = actions
        self.outcome = outcome
        self.timestamp = timestamp
        self.duration = duration
        self.success = success
        self.metadata = metadata or {}

    def to_dict(self) -> Dict[str, Any]:
        """Convert episode to dictionary."""
        return {
            "id": self.id,
            "agent_id": self.agent_id,
            "task": self.task,
            "actions": self.actions,
            "outcome": self.outcome,
            "timestamp": self.timestamp.isoformat(),
            "duration": self.duration,
            "success": self.success,
            "metadata": self.metadata,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Episode":
        """Create episode from dictionary."""
        return cls(
            id=data["id"],
            agent_id=data["agent_id"],
            task=data["task"],
            actions=data["actions"],
            outcome=data["outcome"],
            timestamp=datetime.fromisoformat(data["timestamp"]),
            duration=data["duration"],
            success=data["success"],
            metadata=data.get("metadata", {}),
        )


class EpisodicMemory:
    """Episodic memory for storing and retrieving agent experiences.

    Stores complete episodes of agent behavior including:
    - Tasks attempted
    - Actions taken
    - Outcomes achieved
    - Success/failure patterns
    """

    def __init__(
        self,
        graph_db: Optional[Any] = None,
        max_episodes: int = 1000,
        persistence: Optional[MemoryPersistenceConfig] = None,
    ) -> None:
        """Initialize episodic memory.

        Args:
            graph_db: Graph database client (Neo4j, etc.)
            max_episodes: Maximum number of episodes to store
            persistence: Optional configuration for persisting episodes to disk
        """
        self._graph_db = graph_db
        self._max_episodes = max_episodes
        self._use_graph = graph_db is not None
        self._persistence = persistence
        if persistence:
            self._store = create_memory_store(persistence)
        else:
            self._store = None

        # Fallback to in-memory storage
        self._episodes: Dict[str, Episode] = {}

        if self._use_graph:
            logger.info("Initialized episodic memory with graph database")
        else:
            logger.warning(
                "Graph database not provided. Using in-memory storage. "
                "Episodes will not persist across restarts."
            )

        if self._store and self._persistence and self._persistence.enabled:
            self._load_from_disk()

    async def store_episode(
        self,
        agent_id: str,
        task: str,
        actions: List[Dict[str, Any]],
        outcome: Dict[str, Any],
        duration: float,
        success: bool,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> Episode:
        """Store a new episode.

        Args:
            agent_id: ID of the agent
            task: Task description
            actions: List of actions taken
            outcome: Final outcome
            duration: Duration in seconds
            success: Whether episode was successful
            metadata: Additional metadata

        Returns:
            Created episode
        """
        episode = Episode(
            id=str(uuid.uuid4()),
            agent_id=agent_id,
            task=task,
            actions=actions,
            outcome=outcome,
            timestamp=datetime.now(),
            duration=duration,
            success=success,
            metadata=metadata,
        )

        if self._use_graph:
            await self._store_in_graph(episode)
        else:
            # In-memory storage
            self._episodes[episode.id] = episode

            # Enforce max episodes limit
            if len(self._episodes) > self._max_episodes:
                # Remove oldest episode
                oldest_id = min(
                    self._episodes.keys(),
                    key=lambda k: self._episodes[k].timestamp
                )
                del self._episodes[oldest_id]

        self._persist()

        logger.debug(f"Stored episode {episode.id} for agent {agent_id}")
        return episode

    async def retrieve_episode(self, episode_id: str) -> Optional[Episode]:
        """Retrieve an episode by ID.

        Args:
            episode_id: Episode ID

        Returns:
            Episode if found, None otherwise
        """
        if self._use_graph:
            return await self._retrieve_from_graph(episode_id)

        return self._episodes.get(episode_id)

    async def retrieve_by_agent(
        self,
        agent_id: str,
        limit: int = 10,
        success_only: bool = False,
    ) -> List[Episode]:
        """Retrieve episodes for a specific agent.

        Args:
            agent_id: Agent ID
            limit: Maximum number of episodes
            success_only: Only return successful episodes

        Returns:
            List of episodes
        """
        if self._use_graph:
            return await self._retrieve_by_agent_from_graph(
                agent_id, limit, success_only
            )

        # In-memory retrieval
        episodes = [
            ep for ep in self._episodes.values()
            if ep.agent_id == agent_id
        ]

        if success_only:
            episodes = [ep for ep in episodes if ep.success]

        # Sort by timestamp (most recent first)
        episodes.sort(key=lambda ep: ep.timestamp, reverse=True)

        return episodes[:limit]

    async def retrieve_similar_tasks(
        self,
        task: str,
        limit: int = 5,
    ) -> List[Episode]:
        """Retrieve episodes with similar tasks.

        Args:
            task: Task description
            limit: Maximum number of episodes

        Returns:
            List of similar episodes
        """
        if self._use_graph:
            return await self._retrieve_similar_from_graph(task, limit)

        # Simple in-memory similarity (keyword matching)
        task_lower = task.lower()
        episodes = []

        for episode in self._episodes.values():
            if any(word in episode.task.lower() for word in task_lower.split()):
                episodes.append(episode)

        # Sort by success and recency
        episodes.sort(
            key=lambda ep: (ep.success, ep.timestamp),
            reverse=True
        )

        return episodes[:limit]

    async def get_success_rate(
        self,
        agent_id: Optional[str] = None,
        task_pattern: Optional[str] = None,
    ) -> float:
        """Calculate success rate for episodes.

        Args:
            agent_id: Filter by agent ID (optional)
            task_pattern: Filter by task pattern (optional)

        Returns:
            Success rate (0.0 to 1.0)
        """
        episodes = list(self._episodes.values())

        # Apply filters
        if agent_id:
            episodes = [ep for ep in episodes if ep.agent_id == agent_id]

        if task_pattern:
            pattern_lower = task_pattern.lower()
            episodes = [
                ep for ep in episodes
                if pattern_lower in ep.task.lower()
            ]

        if not episodes:
            return 0.0

        successful = sum(1 for ep in episodes if ep.success)
        return successful / len(episodes)

    async def get_patterns(
        self,
        agent_id: Optional[str] = None,
        min_occurrences: int = 3,
    ) -> List[Dict[str, Any]]:
        """Extract patterns from episodes.

        Args:
            agent_id: Filter by agent ID (optional)
            min_occurrences: Minimum occurrences to be considered a pattern

        Returns:
            List of patterns with statistics
        """
        episodes = list(self._episodes.values())

        if agent_id:
            episodes = [ep for ep in episodes if ep.agent_id == agent_id]

        # Group by task
        task_groups: Dict[str, List[Episode]] = {}
        for episode in episodes:
            task_key = episode.task.lower()
            if task_key not in task_groups:
                task_groups[task_key] = []
            task_groups[task_key].append(episode)

        # Extract patterns
        patterns = []
        for task, task_episodes in task_groups.items():
            if len(task_episodes) >= min_occurrences:
                successful = sum(1 for ep in task_episodes if ep.success)
                patterns.append({
                    "task": task,
                    "occurrences": len(task_episodes),
                    "success_rate": successful / len(task_episodes),
                    "avg_duration": sum(ep.duration for ep in task_episodes) / len(task_episodes),
                    "last_seen": max(ep.timestamp for ep in task_episodes).isoformat(),
                })

        # Sort by occurrences
        patterns.sort(key=lambda p: p["occurrences"], reverse=True)

        return patterns

    async def clear(self, agent_id: Optional[str] = None) -> None:
        """Clear episodes.

        Args:
            agent_id: Clear only episodes for this agent (optional)
        """
        if agent_id:
            # Clear specific agent's episodes
            self._episodes = {
                k: v for k, v in self._episodes.items()
                if v.agent_id != agent_id
            }
            logger.info(f"Cleared episodes for agent {agent_id}")
        else:
            # Clear all episodes
            self._episodes.clear()
            logger.info("Cleared all episodes")

        self._persist()

    async def get_stats(self) -> Dict[str, Any]:
        """Get episodic memory statistics.

        Returns:
            Statistics dictionary
        """
        if not self._episodes:
            return {
                "total_episodes": 0,
                "backend": "graph" if self._use_graph else "in-memory",
                "persistence": bool(self._persistence and self._persistence.enabled),
            }

        episodes = list(self._episodes.values())
        successful = sum(1 for ep in episodes if ep.success)

        return {
            "total_episodes": len(episodes),
            "successful_episodes": successful,
            "failed_episodes": len(episodes) - successful,
            "success_rate": successful / len(episodes),
            "avg_duration": sum(ep.duration for ep in episodes) / len(episodes),
            "unique_agents": len(set(ep.agent_id for ep in episodes)),
            "oldest_episode": min(ep.timestamp for ep in episodes).isoformat(),
            "newest_episode": max(ep.timestamp for ep in episodes).isoformat(),
            "backend": "graph" if self._use_graph else "in-memory",
            "persistence": bool(self._persistence and self._persistence.enabled),
        }

    def _persist(self) -> None:
        if not self._store:
            return
        self._store.save_list(
            "episodic_memory.json",
            [ep.to_dict() for ep in self._episodes.values()],
        )

    def _load_from_disk(self) -> None:
        if not self._store:
            return
        data = self._store.load_list("episodic_memory.json")
        if not data:
            return
        self._episodes = {item["id"]: Episode.from_dict(item) for item in data}

    async def _store_in_graph(self, episode: Episode) -> None:
        """Store episode in graph database (placeholder)."""
        # TODO: Implement Neo4j storage
        logger.warning("Graph database storage not yet implemented")
        # Fallback to in-memory
        self._episodes[episode.id] = episode

    async def _retrieve_from_graph(self, episode_id: str) -> Optional[Episode]:
        """Retrieve episode from graph database (placeholder)."""
        # TODO: Implement Neo4j retrieval
        return self._episodes.get(episode_id)

    async def _retrieve_by_agent_from_graph(
        self,
        agent_id: str,
        limit: int,
        success_only: bool,
    ) -> List[Episode]:
        """Retrieve episodes from graph database (placeholder)."""
        # TODO: Implement Neo4j query
        # Read the in-memory fallback index directly; delegating back to
        # retrieve_by_agent() would recurse while _use_graph is set.
        episodes = [ep for ep in self._episodes.values() if ep.agent_id == agent_id]
        if success_only:
            episodes = [ep for ep in episodes if ep.success]
        episodes.sort(key=lambda ep: ep.timestamp, reverse=True)
        return episodes[:limit]

    async def _retrieve_similar_from_graph(
        self,
        task: str,
        limit: int,
    ) -> List[Episode]:
        """Retrieve similar episodes from graph database (placeholder)."""
        # TODO: Implement Neo4j similarity query
        # Same keyword match as retrieve_similar_tasks(), applied directly to
        # the in-memory fallback index to avoid recursion.
        task_lower = task.lower()
        episodes = [
            ep for ep in self._episodes.values()
            if any(word in ep.task.lower() for word in task_lower.split())
        ]
        episodes.sort(key=lambda ep: (ep.success, ep.timestamp), reverse=True)
        return episodes[:limit]

    def __len__(self) -> int:
        """Get number of stored episodes."""
        return len(self._episodes)

    def __repr__(self) -> str:
        """String representation."""
        backend = "Graph" if self._use_graph else "In-Memory"
        return f"EpisodicMemory(backend={backend}, episodes={len(self._episodes)})"
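
For orientation, a minimal usage sketch of the EpisodicMemory API shown in the hunk above. It is illustrative only and not part of the packaged file: it uses the default in-memory backend (no graph_db or persistence arguments), and the agent ID, task, and action values are made up.

# Illustrative sketch only -- not part of genxai_framework-0.1.0.
# Exercises the in-memory backend of EpisodicMemory as defined above.
import asyncio

from genxai.core.memory.episodic import EpisodicMemory


async def main() -> None:
    memory = EpisodicMemory()  # in-memory, max_episodes=1000 by default

    # Record one episode of (hypothetical) agent behavior.
    await memory.store_episode(
        agent_id="agent-1",
        task="summarize quarterly report",
        actions=[{"tool": "file_reader", "input": "report.pdf"}],
        outcome={"summary": "3-paragraph summary"},
        duration=2.4,
        success=True,
    )

    # Query it back through the public API.
    recent = await memory.retrieve_by_agent("agent-1", limit=5)
    rate = await memory.get_success_rate(agent_id="agent-1")
    stats = await memory.get_stats()
    print(len(recent), rate, stats["backend"])


asyncio.run(main())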