genxai_framework-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cli/__init__.py +3 -0
- cli/commands/__init__.py +6 -0
- cli/commands/approval.py +85 -0
- cli/commands/audit.py +127 -0
- cli/commands/metrics.py +25 -0
- cli/commands/tool.py +389 -0
- cli/main.py +32 -0
- genxai/__init__.py +81 -0
- genxai/api/__init__.py +5 -0
- genxai/api/app.py +21 -0
- genxai/config/__init__.py +5 -0
- genxai/config/settings.py +37 -0
- genxai/connectors/__init__.py +19 -0
- genxai/connectors/base.py +122 -0
- genxai/connectors/kafka.py +92 -0
- genxai/connectors/postgres_cdc.py +95 -0
- genxai/connectors/registry.py +44 -0
- genxai/connectors/sqs.py +94 -0
- genxai/connectors/webhook.py +73 -0
- genxai/core/__init__.py +37 -0
- genxai/core/agent/__init__.py +32 -0
- genxai/core/agent/base.py +206 -0
- genxai/core/agent/config_io.py +59 -0
- genxai/core/agent/registry.py +98 -0
- genxai/core/agent/runtime.py +970 -0
- genxai/core/communication/__init__.py +6 -0
- genxai/core/communication/collaboration.py +44 -0
- genxai/core/communication/message_bus.py +192 -0
- genxai/core/communication/protocols.py +35 -0
- genxai/core/execution/__init__.py +22 -0
- genxai/core/execution/metadata.py +181 -0
- genxai/core/execution/queue.py +201 -0
- genxai/core/graph/__init__.py +30 -0
- genxai/core/graph/checkpoints.py +77 -0
- genxai/core/graph/edges.py +131 -0
- genxai/core/graph/engine.py +813 -0
- genxai/core/graph/executor.py +516 -0
- genxai/core/graph/nodes.py +161 -0
- genxai/core/graph/trigger_runner.py +40 -0
- genxai/core/memory/__init__.py +19 -0
- genxai/core/memory/base.py +72 -0
- genxai/core/memory/embedding.py +327 -0
- genxai/core/memory/episodic.py +448 -0
- genxai/core/memory/long_term.py +467 -0
- genxai/core/memory/manager.py +543 -0
- genxai/core/memory/persistence.py +297 -0
- genxai/core/memory/procedural.py +461 -0
- genxai/core/memory/semantic.py +526 -0
- genxai/core/memory/shared.py +62 -0
- genxai/core/memory/short_term.py +303 -0
- genxai/core/memory/vector_store.py +508 -0
- genxai/core/memory/working.py +211 -0
- genxai/core/state/__init__.py +6 -0
- genxai/core/state/manager.py +293 -0
- genxai/core/state/schema.py +115 -0
- genxai/llm/__init__.py +14 -0
- genxai/llm/base.py +150 -0
- genxai/llm/factory.py +329 -0
- genxai/llm/providers/__init__.py +1 -0
- genxai/llm/providers/anthropic.py +249 -0
- genxai/llm/providers/cohere.py +274 -0
- genxai/llm/providers/google.py +334 -0
- genxai/llm/providers/ollama.py +147 -0
- genxai/llm/providers/openai.py +257 -0
- genxai/llm/routing.py +83 -0
- genxai/observability/__init__.py +6 -0
- genxai/observability/logging.py +327 -0
- genxai/observability/metrics.py +494 -0
- genxai/observability/tracing.py +372 -0
- genxai/performance/__init__.py +39 -0
- genxai/performance/cache.py +256 -0
- genxai/performance/pooling.py +289 -0
- genxai/security/audit.py +304 -0
- genxai/security/auth.py +315 -0
- genxai/security/cost_control.py +528 -0
- genxai/security/default_policies.py +44 -0
- genxai/security/jwt.py +142 -0
- genxai/security/oauth.py +226 -0
- genxai/security/pii.py +366 -0
- genxai/security/policy_engine.py +82 -0
- genxai/security/rate_limit.py +341 -0
- genxai/security/rbac.py +247 -0
- genxai/security/validation.py +218 -0
- genxai/tools/__init__.py +21 -0
- genxai/tools/base.py +383 -0
- genxai/tools/builtin/__init__.py +131 -0
- genxai/tools/builtin/communication/__init__.py +15 -0
- genxai/tools/builtin/communication/email_sender.py +159 -0
- genxai/tools/builtin/communication/notification_manager.py +167 -0
- genxai/tools/builtin/communication/slack_notifier.py +118 -0
- genxai/tools/builtin/communication/sms_sender.py +118 -0
- genxai/tools/builtin/communication/webhook_caller.py +136 -0
- genxai/tools/builtin/computation/__init__.py +15 -0
- genxai/tools/builtin/computation/calculator.py +101 -0
- genxai/tools/builtin/computation/code_executor.py +183 -0
- genxai/tools/builtin/computation/data_validator.py +259 -0
- genxai/tools/builtin/computation/hash_generator.py +129 -0
- genxai/tools/builtin/computation/regex_matcher.py +201 -0
- genxai/tools/builtin/data/__init__.py +15 -0
- genxai/tools/builtin/data/csv_processor.py +213 -0
- genxai/tools/builtin/data/data_transformer.py +299 -0
- genxai/tools/builtin/data/json_processor.py +233 -0
- genxai/tools/builtin/data/text_analyzer.py +288 -0
- genxai/tools/builtin/data/xml_processor.py +175 -0
- genxai/tools/builtin/database/__init__.py +15 -0
- genxai/tools/builtin/database/database_inspector.py +157 -0
- genxai/tools/builtin/database/mongodb_query.py +196 -0
- genxai/tools/builtin/database/redis_cache.py +167 -0
- genxai/tools/builtin/database/sql_query.py +145 -0
- genxai/tools/builtin/database/vector_search.py +163 -0
- genxai/tools/builtin/file/__init__.py +17 -0
- genxai/tools/builtin/file/directory_scanner.py +214 -0
- genxai/tools/builtin/file/file_compressor.py +237 -0
- genxai/tools/builtin/file/file_reader.py +102 -0
- genxai/tools/builtin/file/file_writer.py +122 -0
- genxai/tools/builtin/file/image_processor.py +186 -0
- genxai/tools/builtin/file/pdf_parser.py +144 -0
- genxai/tools/builtin/test/__init__.py +15 -0
- genxai/tools/builtin/test/async_simulator.py +62 -0
- genxai/tools/builtin/test/data_transformer.py +99 -0
- genxai/tools/builtin/test/error_generator.py +82 -0
- genxai/tools/builtin/test/simple_math.py +94 -0
- genxai/tools/builtin/test/string_processor.py +72 -0
- genxai/tools/builtin/web/__init__.py +15 -0
- genxai/tools/builtin/web/api_caller.py +161 -0
- genxai/tools/builtin/web/html_parser.py +330 -0
- genxai/tools/builtin/web/http_client.py +187 -0
- genxai/tools/builtin/web/url_validator.py +162 -0
- genxai/tools/builtin/web/web_scraper.py +170 -0
- genxai/tools/custom/my_test_tool_2.py +9 -0
- genxai/tools/dynamic.py +105 -0
- genxai/tools/mcp_server.py +167 -0
- genxai/tools/persistence/__init__.py +6 -0
- genxai/tools/persistence/models.py +55 -0
- genxai/tools/persistence/service.py +322 -0
- genxai/tools/registry.py +227 -0
- genxai/tools/security/__init__.py +11 -0
- genxai/tools/security/limits.py +214 -0
- genxai/tools/security/policy.py +20 -0
- genxai/tools/security/sandbox.py +248 -0
- genxai/tools/templates.py +435 -0
- genxai/triggers/__init__.py +19 -0
- genxai/triggers/base.py +104 -0
- genxai/triggers/file_watcher.py +75 -0
- genxai/triggers/queue.py +68 -0
- genxai/triggers/registry.py +82 -0
- genxai/triggers/schedule.py +66 -0
- genxai/triggers/webhook.py +68 -0
- genxai/utils/__init__.py +1 -0
- genxai/utils/tokens.py +295 -0
- genxai_framework-0.1.0.dist-info/METADATA +495 -0
- genxai_framework-0.1.0.dist-info/RECORD +156 -0
- genxai_framework-0.1.0.dist-info/WHEEL +5 -0
- genxai_framework-0.1.0.dist-info/entry_points.txt +2 -0
- genxai_framework-0.1.0.dist-info/licenses/LICENSE +21 -0
- genxai_framework-0.1.0.dist-info/top_level.txt +2 -0
genxai/__init__.py
ADDED
@@ -0,0 +1,81 @@
+"""
+GenXAI - Advanced Agentic AI Framework
+
+A powerful framework for building multi-agent AI systems with graph-based orchestration,
+advanced memory systems, and enterprise-grade features.
+"""
+
+__version__ = "0.1.0"
+__author__ = "GenXAI Team"
+__license__ = "MIT"
+
+from genxai.core.agent import (
+    Agent,
+    AgentConfig,
+    AgentFactory,
+    AgentRegistry,
+    AgentRuntime,
+    AgentType,
+)
+from genxai.core.graph import (
+    Edge,
+    EnhancedGraph,
+    Graph,
+    Node,
+    NodeType,
+    TriggerWorkflowRunner,
+    WorkflowExecutor,
+    execute_workflow_sync,
+)
+from genxai.core.memory.manager import MemorySystem
+from genxai.tools import (
+    DynamicTool,
+    Tool,
+    ToolCategory,
+    ToolMetadata,
+    ToolParameter,
+    ToolRegistry,
+    ToolResult,
+)
+from genxai.triggers import (
+    BaseTrigger,
+    TriggerEvent,
+    TriggerRegistry,
+    TriggerStatus,
+    WebhookTrigger,
+    ScheduleTrigger,
+    QueueTrigger,
+)
+
+__all__ = [
+    "__version__",
+    "Agent",
+    "AgentConfig",
+    "AgentFactory",
+    "AgentRegistry",
+    "AgentRuntime",
+    "AgentType",
+    "Graph",
+    "EnhancedGraph",
+    "WorkflowExecutor",
+    "execute_workflow_sync",
+    "TriggerWorkflowRunner",
+    "Node",
+    "NodeType",
+    "Edge",
+    "Tool",
+    "ToolCategory",
+    "ToolMetadata",
+    "ToolParameter",
+    "ToolRegistry",
+    "ToolResult",
+    "DynamicTool",
+    "MemorySystem",
+    "BaseTrigger",
+    "TriggerEvent",
+    "TriggerRegistry",
+    "TriggerStatus",
+    "WebhookTrigger",
+    "ScheduleTrigger",
+    "QueueTrigger",
+]

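For orientation, a minimal import sketch, assuming the wheel is installed in the current environment. The root package re-exports the building blocks listed in __all__, so downstream code can pull them straight from genxai:

import genxai

print(genxai.__version__)  # "0.1.0"

# Names listed in __all__ above are importable from the package root.
from genxai import MemorySystem, Tool, ToolRegistry, WebhookTrigger
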
genxai/api/__init__.py
ADDED
genxai/api/app.py
ADDED
@@ -0,0 +1,21 @@
+"""Minimal FastAPI app for GenXAI observability endpoints."""
+
+from fastapi import FastAPI, Response
+
+from genxai.observability.metrics import get_prometheus_metrics
+
+
+def create_app() -> FastAPI:
+    """Create a FastAPI app exposing observability endpoints.
+
+    Returns:
+        Configured FastAPI application
+    """
+    app = FastAPI(title="GenXAI Observability API", version="0.1.0")
+
+    @app.get("/metrics", response_class=Response)
+    async def metrics() -> Response:
+        """Expose Prometheus metrics."""
+        return Response(content=get_prometheus_metrics(), media_type="text/plain")
+
+    return app

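A minimal serving sketch for the observability app, assuming uvicorn is installed alongside fastapi; the host and port are illustrative:

import uvicorn

from genxai.api.app import create_app

app = create_app()

if __name__ == "__main__":
    # Prometheus can then scrape http://localhost:8000/metrics
    uvicorn.run(app, host="0.0.0.0", port=8000)
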
genxai/config/settings.py
ADDED
@@ -0,0 +1,37 @@
+"""Central configuration for GenXAI runtime."""
+
+from __future__ import annotations
+
+from typing import List, Set
+
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class GenXAISettings(BaseSettings):
+    """Global settings for GenXAI.
+
+    Environment variables use the GENXAI_ prefix.
+    """
+
+    model_config = SettingsConfigDict(env_prefix="GENXAI_", case_sensitive=False)
+
+    tool_allowlist: List[str] = Field(default_factory=list)
+    tool_denylist: List[str] = Field(default_factory=list)
+
+    def allowlist_set(self) -> Set[str]:
+        return {name.strip() for name in self.tool_allowlist if name.strip()}
+
+    def denylist_set(self) -> Set[str]:
+        return {name.strip() for name in self.tool_denylist if name.strip()}
+
+
+_settings: GenXAISettings | None = None
+
+
+def get_settings() -> GenXAISettings:
+    """Get cached settings instance."""
+    global _settings
+    if _settings is None:
+        _settings = GenXAISettings()
+    return _settings

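A configuration sketch for GenXAISettings. The GENXAI_ prefix comes from model_config above; the tool names are illustrative, and list-valued fields are assumed to follow pydantic-settings' default JSON parsing for environment variables:

import os

from genxai.config.settings import get_settings

# Set before the first get_settings() call, since the instance is cached.
os.environ["GENXAI_TOOL_ALLOWLIST"] = '["calculator", "file_reader"]'
os.environ["GENXAI_TOOL_DENYLIST"] = '["code_executor"]'

settings = get_settings()
print(settings.allowlist_set())  # {'calculator', 'file_reader'}
print(settings.denylist_set())   # {'code_executor'}
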
genxai/connectors/__init__.py
ADDED
@@ -0,0 +1,19 @@
+"""Connector SDK for GenXAI integrations."""
+
+from genxai.connectors.base import Connector, ConnectorEvent, ConnectorStatus
+from genxai.connectors.registry import ConnectorRegistry
+from genxai.connectors.webhook import WebhookConnector
+from genxai.connectors.kafka import KafkaConnector
+from genxai.connectors.sqs import SQSConnector
+from genxai.connectors.postgres_cdc import PostgresCDCConnector
+
+__all__ = [
+    "Connector",
+    "ConnectorEvent",
+    "ConnectorStatus",
+    "ConnectorRegistry",
+    "WebhookConnector",
+    "KafkaConnector",
+    "SQSConnector",
+    "PostgresCDCConnector",
+]

genxai/connectors/base.py
ADDED
@@ -0,0 +1,122 @@
+"""Base connector abstractions for GenXAI."""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import Enum
+from typing import Any, Awaitable, Callable, Dict, Optional
+import asyncio
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class ConnectorStatus(str, Enum):
+    """Lifecycle status for connectors."""
+
+    STOPPED = "stopped"
+    STARTING = "starting"
+    RUNNING = "running"
+    STOPPING = "stopping"
+    ERROR = "error"
+
+
+@dataclass
+class ConnectorEvent:
+    """Event emitted by connectors."""
+
+    connector_id: str
+    payload: Dict[str, Any]
+    timestamp: datetime = field(default_factory=datetime.utcnow)
+    metadata: Dict[str, Any] = field(default_factory=dict)
+
+
+class Connector(ABC):
+    """Abstract base class for connector integrations."""
+
+    def __init__(self, connector_id: str, name: Optional[str] = None) -> None:
+        self.connector_id = connector_id
+        self.name = name or connector_id
+        self.status: ConnectorStatus = ConnectorStatus.STOPPED
+        self._callbacks: list[Callable[[ConnectorEvent], Awaitable[None]]] = []
+        self._lock = asyncio.Lock()
+        self._last_error: Optional[str] = None
+        self._last_healthcheck: Optional[str] = None
+
+    def on_event(self, callback: Callable[[ConnectorEvent], Awaitable[None]]) -> None:
+        """Register a callback to receive connector events."""
+        self._callbacks.append(callback)
+
+    async def emit(self, payload: Dict[str, Any], metadata: Optional[Dict[str, Any]] = None) -> None:
+        """Emit connector event to subscribers."""
+        event = ConnectorEvent(
+            connector_id=self.connector_id,
+            payload=payload,
+            metadata=metadata or {},
+        )
+        if not self._callbacks:
+            logger.warning("Connector %s emitted event with no subscribers", self.connector_id)
+            return
+
+        await asyncio.gather(*[callback(event) for callback in self._callbacks])
+
+    async def start(self) -> None:
+        """Start the connector."""
+        async with self._lock:
+            if self.status in {ConnectorStatus.RUNNING, ConnectorStatus.STARTING}:
+                return
+            self.status = ConnectorStatus.STARTING
+            try:
+                await self.validate_config()
+                await self._start()
+                self.status = ConnectorStatus.RUNNING
+                self._last_error = None
+                logger.info("Connector started: %s", self.connector_id)
+            except Exception as exc:
+                self.status = ConnectorStatus.ERROR
+                self._last_error = str(exc)
+                logger.error("Failed to start connector %s: %s", self.connector_id, exc)
+                raise
+
+    async def stop(self) -> None:
+        """Stop the connector."""
+        async with self._lock:
+            if self.status in {ConnectorStatus.STOPPED, ConnectorStatus.STOPPING}:
+                return
+            self.status = ConnectorStatus.STOPPING
+            try:
+                await self._stop()
+                self.status = ConnectorStatus.STOPPED
+                self._last_error = None
+                logger.info("Connector stopped: %s", self.connector_id)
+            except Exception as exc:
+                self.status = ConnectorStatus.ERROR
+                self._last_error = str(exc)
+                logger.error("Failed to stop connector %s: %s", self.connector_id, exc)
+                raise
+
+    async def health_check(self) -> Dict[str, Any]:
+        """Return a health payload for the connector."""
+        status = "ok" if self.status == ConnectorStatus.RUNNING else "not_running"
+        payload = {
+            "connector_id": self.connector_id,
+            "status": status,
+            "lifecycle": self.status.value,
+            "last_error": self._last_error,
+        }
+        self._last_healthcheck = datetime.utcnow().isoformat()
+        return payload
+
+    async def validate_config(self) -> None:
+        """Validate connector configuration before start."""
+        return None
+
+    @abstractmethod
+    async def _start(self) -> None:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def _stop(self) -> None:
+        raise NotImplementedError

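A minimal subclass sketch showing the Connector lifecycle: implement _start()/_stop(), subscribe with on_event(), and publish with emit(). TickConnector and its payload are hypothetical:

import asyncio

from genxai.connectors.base import Connector, ConnectorEvent


class TickConnector(Connector):
    """Hypothetical connector that emits a counter once per second."""

    async def _start(self) -> None:
        self._ticker = asyncio.create_task(self._tick())

    async def _stop(self) -> None:
        self._ticker.cancel()
        await asyncio.gather(self._ticker, return_exceptions=True)

    async def _tick(self) -> None:
        n = 0
        while True:
            await self.emit(payload={"tick": n})
            n += 1
            await asyncio.sleep(1)


async def main() -> None:
    connector = TickConnector(connector_id="ticker")

    async def on_tick(event: ConnectorEvent) -> None:
        print(event.connector_id, event.payload, event.timestamp)

    connector.on_event(on_tick)  # subscribe before starting
    await connector.start()      # runs validate_config(), then _start()
    await asyncio.sleep(3)
    print(await connector.health_check())
    await connector.stop()


asyncio.run(main())
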
genxai/connectors/kafka.py
ADDED
@@ -0,0 +1,92 @@
+"""Kafka connector implementation."""
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional, Callable
+import asyncio
+import json
+import logging
+
+from genxai.connectors.base import Connector
+
+logger = logging.getLogger(__name__)
+
+
+class KafkaConnector(Connector):
+    """Kafka connector using aiokafka."""
+
+    def __init__(
+        self,
+        connector_id: str,
+        topic: str,
+        bootstrap_servers: str,
+        group_id: Optional[str] = None,
+        name: Optional[str] = None,
+        value_deserializer: Optional[Callable[[bytes], Any]] = None,
+        poll_interval: float = 0.1,
+    ) -> None:
+        super().__init__(connector_id=connector_id, name=name)
+        self.topic = topic
+        self.bootstrap_servers = bootstrap_servers
+        self.group_id = group_id
+        self.value_deserializer = value_deserializer or self._default_deserializer
+        self.poll_interval = poll_interval
+        self._consumer: Any = None
+        self._task: Optional[asyncio.Task[None]] = None
+
+    async def _start(self) -> None:
+        from aiokafka import AIOKafkaConsumer
+
+        self._consumer = AIOKafkaConsumer(
+            self.topic,
+            bootstrap_servers=self.bootstrap_servers,
+            group_id=self.group_id,
+            enable_auto_commit=True,
+            value_deserializer=self.value_deserializer,
+        )
+        await self._consumer.start()
+        self._task = asyncio.create_task(self._consume_loop())
+
+    async def _stop(self) -> None:
+        if self._task:
+            self._task.cancel()
+            await asyncio.gather(self._task, return_exceptions=True)
+            self._task = None
+        if self._consumer:
+            await self._consumer.stop()
+            self._consumer = None
+
+    async def validate_config(self) -> None:
+        if not self.topic:
+            raise ValueError("Kafka topic must be provided")
+        if not self.bootstrap_servers:
+            raise ValueError("Kafka bootstrap_servers must be provided")
+
+    async def _consume_loop(self) -> None:
+        while True:
+            try:
+                msg = await self._consumer.getone()
+                payload = msg.value
+                await self.emit(
+                    payload=payload,
+                    metadata={
+                        "topic": msg.topic,
+                        "partition": msg.partition,
+                        "offset": msg.offset,
+                        "timestamp": msg.timestamp,
+                    },
+                )
+            except asyncio.CancelledError:
+                break
+            except Exception as exc:
+                logger.error("Kafka consumer error: %s", exc)
+                await asyncio.sleep(self.poll_interval)
+
+    def _default_deserializer(self, raw: bytes) -> Any:
+        try:
+            return json.loads(raw.decode("utf-8"))
+        except Exception:
+            return raw
+
+    async def handle_message(self, payload: Dict[str, Any]) -> None:
+        await self.emit(payload=payload)

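A consumption sketch for KafkaConnector, assuming aiokafka is installed and a broker is reachable at localhost:9092; the topic and group names are illustrative. The same subscribe-then-start pattern applies to SQSConnector and PostgresCDCConnector, which share the Connector base class:

import asyncio

from genxai.connectors import ConnectorEvent, KafkaConnector


async def main() -> None:
    connector = KafkaConnector(
        connector_id="orders-kafka",
        topic="orders",
        bootstrap_servers="localhost:9092",
        group_id="genxai-consumers",
    )

    async def on_message(event: ConnectorEvent) -> None:
        # payload is the JSON-decoded message value (raw bytes if decoding fails)
        print(event.metadata["topic"], event.metadata["offset"], event.payload)

    connector.on_event(on_message)
    await connector.start()
    await asyncio.sleep(30)  # consume for a while
    await connector.stop()


asyncio.run(main())
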
genxai/connectors/postgres_cdc.py
ADDED
@@ -0,0 +1,95 @@
+"""Postgres CDC connector implementation (wal2json)."""
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+import asyncio
+import json
+import logging
+
+
+from genxai.connectors.base import Connector
+
+logger = logging.getLogger(__name__)
+
+
+class PostgresCDCConnector(Connector):
+    """Postgres CDC connector using wal2json output plugin."""
+
+    def __init__(
+        self,
+        connector_id: str,
+        dsn: str,
+        slot_name: str,
+        publication: str,
+        name: Optional[str] = None,
+        poll_interval: float = 1.0,
+    ) -> None:
+        super().__init__(connector_id=connector_id, name=name)
+        self.dsn = dsn
+        self.slot_name = slot_name
+        self.publication = publication
+        self.poll_interval = poll_interval
+        self._conn: Optional[Any] = None
+        self._task: Optional[asyncio.Task[None]] = None
+
+    async def _start(self) -> None:
+        import asyncpg
+
+        self._conn = await asyncpg.connect(self.dsn)
+        await self._ensure_slot()
+        self._task = asyncio.create_task(self._consume_loop())
+
+    async def _stop(self) -> None:
+        if self._task:
+            self._task.cancel()
+            await asyncio.gather(self._task, return_exceptions=True)
+            self._task = None
+        if self._conn:
+            await self._conn.close()
+            self._conn = None
+
+    async def validate_config(self) -> None:
+        if not self.dsn:
+            raise ValueError("Postgres CDC dsn must be provided")
+        if not self.slot_name:
+            raise ValueError("Postgres CDC slot_name must be provided")
+        if not self.publication:
+            raise ValueError("Postgres CDC publication must be provided")
+
+    async def _ensure_slot(self) -> None:
+        assert self._conn is not None
+        await self._conn.execute(
+            "SELECT * FROM pg_create_logical_replication_slot($1, 'wal2json') "
+            "ON CONFLICT DO NOTHING;",
+            self.slot_name,
+        )
+
+    async def _consume_loop(self) -> None:
+        while True:
+            try:
+                assert self._conn is not None
+                rows = await self._conn.fetch(
+                    "SELECT data FROM pg_logical_slot_get_changes($1, NULL, NULL, 'pretty-print', '1')",
+                    self.slot_name,
+                )
+                for row in rows:
+                    payload = self._deserialize(row["data"])
+                    await self.emit(payload=payload)
+                await asyncio.sleep(self.poll_interval)
+            except asyncio.CancelledError:
+                break
+            except Exception as exc:
+                logger.error("Postgres CDC error: %s", exc)
+                await asyncio.sleep(self.poll_interval)
+
+    def _deserialize(self, raw: Optional[str]) -> Any:
+        if raw is None:
+            return None
+        try:
+            return json.loads(raw)
+        except Exception:
+            return raw
+
+    async def handle_change(self, payload: Dict[str, Any]) -> None:
+        await self.emit(payload=payload)

genxai/connectors/registry.py
ADDED
@@ -0,0 +1,44 @@
+"""Connector registry for GenXAI integrations."""
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+import logging
+
+from genxai.connectors.base import Connector
+
+logger = logging.getLogger(__name__)
+
+
+class ConnectorRegistry:
+    """Registry for connectors."""
+
+    _connectors: Dict[str, Connector] = {}
+
+    @classmethod
+    def register(cls, connector: Connector) -> None:
+        if connector.connector_id in cls._connectors:
+            logger.warning("Connector %s already registered", connector.connector_id)
+        cls._connectors[connector.connector_id] = connector
+
+    @classmethod
+    def unregister(cls, connector_id: str) -> None:
+        cls._connectors.pop(connector_id, None)
+
+    @classmethod
+    def get(cls, connector_id: str) -> Optional[Connector]:
+        return cls._connectors.get(connector_id)
+
+    @classmethod
+    def list_all(cls) -> List[Connector]:
+        return list(cls._connectors.values())
+
+    @classmethod
+    async def start_all(cls) -> None:
+        for connector in cls._connectors.values():
+            await connector.start()
+
+    @classmethod
+    async def stop_all(cls) -> None:
+        for connector in cls._connectors.values():
+            await connector.stop()

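A grouping sketch for ConnectorRegistry: register connector instances once, then drive them together with start_all(), health_check(), and stop_all(). The queue URL, region, and secret are placeholders:

import asyncio

from genxai.connectors import ConnectorRegistry, SQSConnector, WebhookConnector


async def main() -> None:
    ConnectorRegistry.register(
        WebhookConnector(connector_id="inbound-webhook", secret="change-me")
    )
    ConnectorRegistry.register(
        SQSConnector(
            connector_id="jobs-sqs",
            queue_url="https://sqs.us-east-1.amazonaws.com/123456789012/jobs",
            region="us-east-1",
        )
    )

    await ConnectorRegistry.start_all()
    for connector in ConnectorRegistry.list_all():
        print(await connector.health_check())
    await ConnectorRegistry.stop_all()


asyncio.run(main())
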
genxai/connectors/sqs.py
ADDED
@@ -0,0 +1,94 @@
+"""AWS SQS connector implementation."""
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+import asyncio
+import json
+import logging
+
+
+from genxai.connectors.base import Connector
+
+logger = logging.getLogger(__name__)
+
+
+class SQSConnector(Connector):
+    """SQS connector using aioboto3."""
+
+    def __init__(
+        self,
+        connector_id: str,
+        queue_url: str,
+        region: Optional[str] = None,
+        name: Optional[str] = None,
+        poll_interval: float = 1.0,
+        wait_time_seconds: int = 10,
+        max_messages: int = 10,
+    ) -> None:
+        super().__init__(connector_id=connector_id, name=name)
+        self.queue_url = queue_url
+        self.region = region
+        self.poll_interval = poll_interval
+        self.wait_time_seconds = wait_time_seconds
+        self.max_messages = max_messages
+        self._task: Optional[asyncio.Task[None]] = None
+        self._session = None
+
+    async def _start(self) -> None:
+        self._task = asyncio.create_task(self._poll_loop())
+
+    async def _stop(self) -> None:
+        if self._task:
+            self._task.cancel()
+            await asyncio.gather(self._task, return_exceptions=True)
+            self._task = None
+
+    async def validate_config(self) -> None:
+        if not self.queue_url:
+            raise ValueError("SQS queue_url must be provided")
+
+    async def _poll_loop(self) -> None:
+        while True:
+            try:
+                if self._session is None:
+                    import aioboto3
+
+                    self._session = aioboto3.Session()
+                async with self._session.client("sqs", region_name=self.region) as client:
+                    response = await client.receive_message(
+                        QueueUrl=self.queue_url,
+                        MaxNumberOfMessages=self.max_messages,
+                        WaitTimeSeconds=self.wait_time_seconds,
+                    )
+                    messages = response.get("Messages", [])
+                    for message in messages:
+                        body = message.get("Body")
+                        payload = self._deserialize(body)
+                        await self.emit(
+                            payload=payload,
+                            metadata={
+                                "message_id": message.get("MessageId"),
+                                "receipt_handle": message.get("ReceiptHandle"),
+                            },
+                        )
+                        await client.delete_message(
+                            QueueUrl=self.queue_url,
+                            ReceiptHandle=message.get("ReceiptHandle"),
+                        )
+            except asyncio.CancelledError:
+                break
+            except Exception as exc:
+                logger.error("SQS poll error: %s", exc)
+                await asyncio.sleep(self.poll_interval)
+
+    def _deserialize(self, body: Optional[str]) -> Any:
+        if body is None:
+            return None
+        try:
+            return json.loads(body)
+        except Exception:
+            return body
+
+    async def handle_message(self, payload: Dict[str, Any]) -> None:
+        await self.emit(payload=payload)

genxai/connectors/webhook.py
ADDED
@@ -0,0 +1,73 @@
+"""Webhook connector implementation."""
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+import hmac
+import hashlib
+import logging
+
+from genxai.connectors.base import Connector
+
+logger = logging.getLogger(__name__)
+
+
+class WebhookConnector(Connector):
+    """HTTP webhook connector.
+
+    Provides a handler that can be mounted in FastAPI or other ASGI frameworks.
+    """
+
+    def __init__(
+        self,
+        connector_id: str,
+        secret: Optional[str] = None,
+        name: Optional[str] = None,
+        header_name: str = "X-GenXAI-Signature",
+        hash_alg: str = "sha256",
+    ) -> None:
+        super().__init__(connector_id=connector_id, name=name)
+        self.secret = secret
+        self.header_name = header_name
+        self.hash_alg = hash_alg
+
+    async def _start(self) -> None:
+        logger.debug("Webhook connector %s ready for requests", self.connector_id)
+
+    async def _stop(self) -> None:
+        logger.debug("Webhook connector %s stopped", self.connector_id)
+
+    async def validate_config(self) -> None:
+        if self.secret is not None and not isinstance(self.secret, str):
+            raise ValueError("Webhook secret must be a string")
+        if not self.header_name:
+            raise ValueError("Webhook header_name must be set")
+        if not hasattr(hashlib, self.hash_alg):
+            raise ValueError(f"Unsupported hash algorithm: {self.hash_alg}")
+
+    def validate_signature(self, payload: bytes, signature: Optional[str]) -> bool:
+        if not self.secret:
+            return True
+        if not signature:
+            return False
+
+        digest = hmac.new(self.secret.encode(), payload, getattr(hashlib, self.hash_alg)).hexdigest()
+        expected = f"{self.hash_alg}={digest}"
+        return hmac.compare_digest(expected, signature)
+
+    async def handle_request(
+        self,
+        payload: Dict[str, Any],
+        raw_body: Optional[bytes] = None,
+        headers: Optional[Dict[str, str]] = None,
+    ) -> Dict[str, Any]:
+        headers = headers or {}
+        signature = headers.get(self.header_name)
+
+        if self.secret and raw_body is not None:
+            if not self.validate_signature(raw_body, signature):
+                logger.warning("Webhook signature validation failed for %s", self.connector_id)
+                return {"status": "rejected", "reason": "invalid signature"}
+
+        await self.emit(payload=payload, metadata={"headers": headers})
+        return {"status": "accepted", "connector_id": self.connector_id}

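A mounting sketch for WebhookConnector in a FastAPI route, as the class docstring suggests. The route path and secret are illustrative; when a secret is set, the sender is expected to sign the raw body as "sha256=<hexdigest>" in the X-GenXAI-Signature header, matching validate_signature() above:

from fastapi import FastAPI, Request

from genxai.connectors import ConnectorEvent, WebhookConnector

app = FastAPI()
connector = WebhookConnector(connector_id="inbound-webhook", secret="change-me")


async def forward(event: ConnectorEvent) -> None:
    # Downstream handling of accepted webhook payloads goes here.
    print("webhook payload:", event.payload)


connector.on_event(forward)


@app.post("/hooks/inbound")
async def inbound(request: Request) -> dict:
    raw_body = await request.body()
    payload = await request.json()
    signature = request.headers.get(connector.header_name)  # case-insensitive lookup
    headers = {connector.header_name: signature} if signature else {}
    # handle_request() rejects the call if the signature does not match, then emits.
    return await connector.handle_request(payload=payload, raw_body=raw_body, headers=headers)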