kailash 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. kailash/__init__.py +33 -1
  2. kailash/access_control/__init__.py +129 -0
  3. kailash/access_control/managers.py +461 -0
  4. kailash/access_control/rule_evaluators.py +467 -0
  5. kailash/access_control_abac.py +825 -0
  6. kailash/config/__init__.py +27 -0
  7. kailash/config/database_config.py +359 -0
  8. kailash/database/__init__.py +28 -0
  9. kailash/database/execution_pipeline.py +499 -0
  10. kailash/middleware/__init__.py +306 -0
  11. kailash/middleware/auth/__init__.py +33 -0
  12. kailash/middleware/auth/access_control.py +436 -0
  13. kailash/middleware/auth/auth_manager.py +422 -0
  14. kailash/middleware/auth/jwt_auth.py +477 -0
  15. kailash/middleware/auth/kailash_jwt_auth.py +616 -0
  16. kailash/middleware/communication/__init__.py +37 -0
  17. kailash/middleware/communication/ai_chat.py +989 -0
  18. kailash/middleware/communication/api_gateway.py +802 -0
  19. kailash/middleware/communication/events.py +470 -0
  20. kailash/middleware/communication/realtime.py +710 -0
  21. kailash/middleware/core/__init__.py +21 -0
  22. kailash/middleware/core/agent_ui.py +890 -0
  23. kailash/middleware/core/schema.py +643 -0
  24. kailash/middleware/core/workflows.py +396 -0
  25. kailash/middleware/database/__init__.py +63 -0
  26. kailash/middleware/database/base.py +113 -0
  27. kailash/middleware/database/base_models.py +525 -0
  28. kailash/middleware/database/enums.py +106 -0
  29. kailash/middleware/database/migrations.py +12 -0
  30. kailash/{api/database.py → middleware/database/models.py} +183 -291
  31. kailash/middleware/database/repositories.py +685 -0
  32. kailash/middleware/database/session_manager.py +19 -0
  33. kailash/middleware/mcp/__init__.py +38 -0
  34. kailash/middleware/mcp/client_integration.py +585 -0
  35. kailash/middleware/mcp/enhanced_server.py +576 -0
  36. kailash/nodes/__init__.py +25 -3
  37. kailash/nodes/admin/__init__.py +35 -0
  38. kailash/nodes/admin/audit_log.py +794 -0
  39. kailash/nodes/admin/permission_check.py +864 -0
  40. kailash/nodes/admin/role_management.py +823 -0
  41. kailash/nodes/admin/security_event.py +1519 -0
  42. kailash/nodes/admin/user_management.py +944 -0
  43. kailash/nodes/ai/a2a.py +24 -7
  44. kailash/nodes/ai/ai_providers.py +1 -0
  45. kailash/nodes/ai/embedding_generator.py +11 -11
  46. kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
  47. kailash/nodes/ai/llm_agent.py +407 -2
  48. kailash/nodes/ai/self_organizing.py +85 -10
  49. kailash/nodes/api/auth.py +287 -6
  50. kailash/nodes/api/rest.py +151 -0
  51. kailash/nodes/auth/__init__.py +17 -0
  52. kailash/nodes/auth/directory_integration.py +1228 -0
  53. kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
  54. kailash/nodes/auth/mfa.py +2338 -0
  55. kailash/nodes/auth/risk_assessment.py +872 -0
  56. kailash/nodes/auth/session_management.py +1093 -0
  57. kailash/nodes/auth/sso.py +1040 -0
  58. kailash/nodes/base.py +344 -13
  59. kailash/nodes/base_cycle_aware.py +4 -2
  60. kailash/nodes/base_with_acl.py +1 -1
  61. kailash/nodes/code/python.py +293 -12
  62. kailash/nodes/compliance/__init__.py +9 -0
  63. kailash/nodes/compliance/data_retention.py +1888 -0
  64. kailash/nodes/compliance/gdpr.py +2004 -0
  65. kailash/nodes/data/__init__.py +22 -2
  66. kailash/nodes/data/async_connection.py +469 -0
  67. kailash/nodes/data/async_sql.py +757 -0
  68. kailash/nodes/data/async_vector.py +598 -0
  69. kailash/nodes/data/readers.py +767 -0
  70. kailash/nodes/data/retrieval.py +360 -1
  71. kailash/nodes/data/sharepoint_graph.py +397 -21
  72. kailash/nodes/data/sql.py +94 -5
  73. kailash/nodes/data/streaming.py +68 -8
  74. kailash/nodes/data/vector_db.py +54 -4
  75. kailash/nodes/enterprise/__init__.py +13 -0
  76. kailash/nodes/enterprise/batch_processor.py +741 -0
  77. kailash/nodes/enterprise/data_lineage.py +497 -0
  78. kailash/nodes/logic/convergence.py +31 -9
  79. kailash/nodes/logic/operations.py +14 -3
  80. kailash/nodes/mixins/__init__.py +8 -0
  81. kailash/nodes/mixins/event_emitter.py +201 -0
  82. kailash/nodes/mixins/mcp.py +9 -4
  83. kailash/nodes/mixins/security.py +165 -0
  84. kailash/nodes/monitoring/__init__.py +7 -0
  85. kailash/nodes/monitoring/performance_benchmark.py +2497 -0
  86. kailash/nodes/rag/__init__.py +284 -0
  87. kailash/nodes/rag/advanced.py +1615 -0
  88. kailash/nodes/rag/agentic.py +773 -0
  89. kailash/nodes/rag/conversational.py +999 -0
  90. kailash/nodes/rag/evaluation.py +875 -0
  91. kailash/nodes/rag/federated.py +1188 -0
  92. kailash/nodes/rag/graph.py +721 -0
  93. kailash/nodes/rag/multimodal.py +671 -0
  94. kailash/nodes/rag/optimized.py +933 -0
  95. kailash/nodes/rag/privacy.py +1059 -0
  96. kailash/nodes/rag/query_processing.py +1335 -0
  97. kailash/nodes/rag/realtime.py +764 -0
  98. kailash/nodes/rag/registry.py +547 -0
  99. kailash/nodes/rag/router.py +837 -0
  100. kailash/nodes/rag/similarity.py +1854 -0
  101. kailash/nodes/rag/strategies.py +566 -0
  102. kailash/nodes/rag/workflows.py +575 -0
  103. kailash/nodes/security/__init__.py +19 -0
  104. kailash/nodes/security/abac_evaluator.py +1411 -0
  105. kailash/nodes/security/audit_log.py +91 -0
  106. kailash/nodes/security/behavior_analysis.py +1893 -0
  107. kailash/nodes/security/credential_manager.py +401 -0
  108. kailash/nodes/security/rotating_credentials.py +760 -0
  109. kailash/nodes/security/security_event.py +132 -0
  110. kailash/nodes/security/threat_detection.py +1103 -0
  111. kailash/nodes/testing/__init__.py +9 -0
  112. kailash/nodes/testing/credential_testing.py +499 -0
  113. kailash/nodes/transform/__init__.py +10 -2
  114. kailash/nodes/transform/chunkers.py +592 -1
  115. kailash/nodes/transform/processors.py +484 -14
  116. kailash/nodes/validation.py +321 -0
  117. kailash/runtime/access_controlled.py +1 -1
  118. kailash/runtime/async_local.py +41 -7
  119. kailash/runtime/docker.py +1 -1
  120. kailash/runtime/local.py +474 -55
  121. kailash/runtime/parallel.py +1 -1
  122. kailash/runtime/parallel_cyclic.py +1 -1
  123. kailash/runtime/testing.py +210 -2
  124. kailash/utils/migrations/__init__.py +25 -0
  125. kailash/utils/migrations/generator.py +433 -0
  126. kailash/utils/migrations/models.py +231 -0
  127. kailash/utils/migrations/runner.py +489 -0
  128. kailash/utils/secure_logging.py +342 -0
  129. kailash/workflow/__init__.py +16 -0
  130. kailash/workflow/cyclic_runner.py +3 -4
  131. kailash/workflow/graph.py +70 -2
  132. kailash/workflow/resilience.py +249 -0
  133. kailash/workflow/templates.py +726 -0
  134. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
  135. kailash-0.4.0.dist-info/RECORD +223 -0
  136. kailash/api/__init__.py +0 -17
  137. kailash/api/__main__.py +0 -6
  138. kailash/api/studio_secure.py +0 -893
  139. kailash/mcp/__main__.py +0 -13
  140. kailash/mcp/server_new.py +0 -336
  141. kailash/mcp/servers/__init__.py +0 -12
  142. kailash-0.3.1.dist-info/RECORD +0 -136
  143. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
  144. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
  145. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
  146. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
kailash/nodes/admin/audit_log.py
@@ -0,0 +1,794 @@
"""Enterprise audit logging node for compliance and security tracking.

This node provides comprehensive audit logging for enterprise compliance,
security monitoring, and forensic analysis. It is built on Session 065's
async database infrastructure for high-performance logging with retention
policies and advanced querying capabilities.

Features:
- Comprehensive audit trail for all admin operations
- Security event tracking and alerting
- Compliance reporting and data retention
- Real-time log streaming and monitoring
- Advanced querying and filtering
- Automated log archiving and cleanup
- Integration with SIEM systems
- Multi-tenant log isolation
"""

import json
import uuid
from dataclasses import dataclass
from datetime import UTC, datetime
from enum import Enum
from typing import Any, Dict, List, Optional

from kailash.nodes.base import Node, NodeParameter, register_node
from kailash.nodes.data import AsyncSQLDatabaseNode
from kailash.sdk_exceptions import NodeExecutionError, NodeValidationError

class AuditEventType(Enum):
    """Types of audit events."""

    USER_LOGIN = "user_login"
    USER_LOGOUT = "user_logout"
    USER_CREATED = "user_created"
    USER_UPDATED = "user_updated"
    USER_DELETED = "user_deleted"
    USER_ACTIVATED = "user_activated"
    USER_DEACTIVATED = "user_deactivated"
    PASSWORD_CHANGED = "password_changed"
    PASSWORD_RESET = "password_reset"
    ROLE_ASSIGNED = "role_assigned"
    ROLE_UNASSIGNED = "role_unassigned"
    ROLE_CREATED = "role_created"
    ROLE_UPDATED = "role_updated"
    ROLE_DELETED = "role_deleted"
    PERMISSION_GRANTED = "permission_granted"
    PERMISSION_REVOKED = "permission_revoked"
    PERMISSION_CHECKED = "permission_checked"
    PERMISSION_DENIED = "permission_denied"
    DATA_ACCESSED = "data_accessed"
    DATA_MODIFIED = "data_modified"
    DATA_DELETED = "data_deleted"
    DATA_EXPORTED = "data_exported"
    WORKFLOW_EXECUTED = "workflow_executed"
    WORKFLOW_FAILED = "workflow_failed"
    SYSTEM_CONFIG_CHANGED = "system_config_changed"
    SECURITY_VIOLATION = "security_violation"
    COMPLIANCE_EVENT = "compliance_event"
    CUSTOM = "custom"


class AuditSeverity(Enum):
    """Severity levels for audit events."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"


class AuditOperation(Enum):
    """Supported audit logging operations."""

    LOG_EVENT = "log_event"
    LOG_BATCH = "log_batch"
    QUERY_LOGS = "query_logs"
    GET_USER_ACTIVITY = "get_user_activity"
    GET_SECURITY_EVENTS = "get_security_events"
    GENERATE_REPORT = "generate_report"
    EXPORT_LOGS = "export_logs"
    ARCHIVE_LOGS = "archive_logs"
    DELETE_LOGS = "delete_logs"
    GET_STATISTICS = "get_statistics"
    MONITOR_REALTIME = "monitor_realtime"

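These value-backed enums double as validation vocabularies for the node's string parameters; a doctest-style sketch of that behavior (standard `enum` semantics, not part of the published file):

>>> AuditOperation("log_event") is AuditOperation.LOG_EVENT
True
>>> [op.value for op in AuditOperation][:3]
['log_event', 'log_batch', 'query_logs']
>>> AuditEventType("not_a_type")
Traceback (most recent call last):
    ...
ValueError: 'not_a_type' is not a valid AuditEventType
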
@dataclass
class AuditEvent:
    """Audit event structure."""

    event_id: str
    event_type: AuditEventType
    severity: AuditSeverity
    user_id: Optional[str]
    tenant_id: str
    resource_id: Optional[str]
    action: str
    description: str
    metadata: Dict[str, Any]
    timestamp: datetime
    ip_address: Optional[str] = None
    user_agent: Optional[str] = None
    session_id: Optional[str] = None
    correlation_id: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "event_id": self.event_id,
            "event_type": self.event_type.value,
            "severity": self.severity.value,
            "user_id": self.user_id,
            "tenant_id": self.tenant_id,
            "resource_id": self.resource_id,
            "action": self.action,
            "description": self.description,
            "metadata": self.metadata,
            "timestamp": self.timestamp.isoformat(),
            "ip_address": self.ip_address,
            "user_agent": self.user_agent,
            "session_id": self.session_id,
            "correlation_id": self.correlation_id,
        }

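A minimal round-trip sketch for `AuditEvent` (illustrative values only, not part of the published file):

>>> evt = AuditEvent(
...     event_id="e-1", event_type=AuditEventType.USER_LOGIN,
...     severity=AuditSeverity.MEDIUM, user_id="user123", tenant_id="default",
...     resource_id=None, action="successful_login",
...     description="User logged in successfully", metadata={},
...     timestamp=datetime(2025, 6, 1, tzinfo=UTC),
... )
>>> evt.to_dict()["event_type"], evt.to_dict()["timestamp"]
('user_login', '2025-06-01T00:00:00+00:00')
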
@register_node()
class AuditLogNode(Node):
    """Enterprise audit logging node with comprehensive compliance features.

    This node provides audit logging capabilities including:
    - Structured audit event logging
    - Advanced querying and filtering
    - Compliance reporting and export
    - Real-time monitoring and alerting
    - Automated archiving and retention
    - Multi-tenant log isolation

    Parameters:
        operation: Type of audit operation to perform
        event_data: Event data for logging operations
        events: List of events for batch logging
        query_filters: Filters for log querying
        user_id: User ID for user-specific queries
        event_types: Event types to filter
        severity: Minimum severity level
        date_range: Date range for queries
        pagination: Pagination parameters
        export_format: Format for log export
        tenant_id: Tenant ID for multi-tenant isolation

    Example:
        >>> # Log a single audit event
        >>> node = AuditLogNode(
        ...     operation="log_event",
        ...     event_data={
        ...         "event_type": "user_login",
        ...         "severity": "medium",
        ...         "user_id": "user123",
        ...         "action": "successful_login",
        ...         "description": "User logged in successfully",
        ...         "metadata": {
        ...             "login_method": "password",
        ...             "mfa_used": True
        ...         },
        ...         "ip_address": "192.168.1.100"
        ...     }
        ... )
        >>> result = node.run()
        >>> event_id = result["result"]["event"]["event_id"]

        >>> # Query security events
        >>> node = AuditLogNode(
        ...     operation="get_security_events",
        ...     query_filters={
        ...         "severity": ["high", "critical"],
        ...         "date_range": {
        ...             "start": "2025-06-01T00:00:00Z",
        ...             "end": "2025-06-12T23:59:59Z"
        ...         }
        ...     },
        ...     pagination={"page": 1, "size": 50}
        ... )
        >>> result = node.run()
        >>> events = result["result"]["logs"]

        >>> # Generate a compliance report
        >>> node = AuditLogNode(
        ...     operation="generate_report",
        ...     query_filters={
        ...         "event_types": ["data_accessed", "data_modified", "data_exported"],
        ...         "user_id": "analyst123"
        ...     },
        ...     export_format="json"
        ... )
        >>> result = node.run()
        >>> report = result["result"]["report"]
    """

    def __init__(self, **config):
        super().__init__(**config)
        self._db_node = None

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Define parameters for audit logging operations."""
        return {
            param.name: param
            for param in [
                # Operation type
                NodeParameter(
                    name="operation",
                    type=str,
                    required=True,
                    description="Audit logging operation to perform",
                    choices=[op.value for op in AuditOperation],
                ),
                # Event data for logging
                NodeParameter(
                    name="event_data",
                    type=dict,
                    required=False,
                    description="Event data for logging operations",
                ),
                NodeParameter(
                    name="events",
                    type=list,
                    required=False,
                    description="List of events for batch logging",
                ),
                # Query parameters
                NodeParameter(
                    name="query_filters",
                    type=dict,
                    required=False,
                    description="Filters for log querying",
                ),
                NodeParameter(
                    name="user_id",
                    type=str,
                    required=False,
                    description="User ID for user-specific queries",
                ),
                NodeParameter(
                    name="event_types",
                    type=list,
                    required=False,
                    description="Event types to filter",
                ),
                NodeParameter(
                    name="severity",
                    type=str,
                    required=False,
                    choices=[s.value for s in AuditSeverity],
                    description="Minimum severity level",
                ),
                # Date range
                NodeParameter(
                    name="date_range",
                    type=dict,
                    required=False,
                    description="Date range for queries (start, end)",
                ),
                # Pagination
                NodeParameter(
                    name="pagination",
                    type=dict,
                    required=False,
                    description="Pagination parameters (page, size, sort)",
                ),
                # Export options
                NodeParameter(
                    name="export_format",
                    type=str,
                    required=False,
                    choices=["json", "csv", "pdf"],
                    description="Format for log export",
                ),
                # Multi-tenancy
                NodeParameter(
                    name="tenant_id",
                    type=str,
                    required=False,
                    description="Tenant ID for multi-tenant isolation",
                ),
                # Database configuration
                NodeParameter(
                    name="database_config",
                    type=dict,
                    required=False,
                    description="Database connection configuration",
                ),
                # Archiving options
                NodeParameter(
                    name="archive_older_than_days",
                    type=int,
                    required=False,
                    description="Archive logs older than the specified number of days",
                ),
                NodeParameter(
                    name="delete_older_than_days",
                    type=int,
                    required=False,
                    description="Delete logs older than the specified number of days",
                ),
                # Real-time monitoring
                NodeParameter(
                    name="stream_duration_seconds",
                    type=int,
                    required=False,
                    default=60,
                    description="Duration for real-time log streaming",
                ),
            ]
        }

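The comprehension above keys each `NodeParameter` by its name; a quick sketch of the resulting mapping (assuming `NodeParameter` exposes its constructor arguments as attributes and that the base `Node` accepts this config, as the class docstring suggests):

>>> params = AuditLogNode(operation="log_event").get_parameters()
>>> params["operation"].required
True
>>> "pdf" in params["export_format"].choices
True
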
    def run(self, **inputs) -> Dict[str, Any]:
        """Execute audit logging operation."""
        try:
            operation = AuditOperation(inputs["operation"])

            # Initialize dependencies
            self._init_dependencies(inputs)

            # Route to the appropriate operation
            if operation == AuditOperation.LOG_EVENT:
                return self._log_event(inputs)
            elif operation == AuditOperation.LOG_BATCH:
                return self._log_batch(inputs)
            elif operation == AuditOperation.QUERY_LOGS:
                return self._query_logs(inputs)
            elif operation == AuditOperation.GET_USER_ACTIVITY:
                return self._get_user_activity(inputs)
            elif operation == AuditOperation.GET_SECURITY_EVENTS:
                return self._get_security_events(inputs)
            elif operation == AuditOperation.GENERATE_REPORT:
                return self._generate_report(inputs)
            elif operation == AuditOperation.EXPORT_LOGS:
                return self._export_logs(inputs)
            elif operation == AuditOperation.ARCHIVE_LOGS:
                return self._archive_logs(inputs)
            elif operation == AuditOperation.DELETE_LOGS:
                return self._delete_logs(inputs)
            elif operation == AuditOperation.GET_STATISTICS:
                return self._get_statistics(inputs)
            elif operation == AuditOperation.MONITOR_REALTIME:
                return self._monitor_realtime(inputs)
            else:
                raise NodeExecutionError(f"Unknown operation: {operation}")

        except NodeValidationError:
            # Let validation errors propagate with their original type
            raise
        except Exception as e:
            raise NodeExecutionError(f"Audit logging operation failed: {e}") from e

    def _init_dependencies(self, inputs: Dict[str, Any]):
        """Initialize database dependencies."""
        # Get database config (development defaults; production deployments
        # should always supply database_config explicitly)
        db_config = inputs.get(
            "database_config",
            {
                "database_type": "postgresql",
                "host": "localhost",
                "port": 5432,
                "database": "kailash_admin",
                "user": "admin",
                "password": "admin",
            },
        )

        # Initialize async database node
        self._db_node = AsyncSQLDatabaseNode(name="audit_log_db", **db_config)

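A hedged sketch of supplying real connection settings (the keys mirror the defaults above; the exact `AsyncSQLDatabaseNode` contract is assumed, and the host and credentials are placeholders):

node = AuditLogNode(
    operation="query_logs",
    tenant_id="acme",
    database_config={
        "database_type": "postgresql",
        "host": "db.internal.example",  # placeholder host
        "port": 5432,
        "database": "kailash_admin",
        "user": "audit_writer",   # placeholder credentials; prefer a
        "password": "change-me",  # secrets manager over literals
    },
)
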
    def _log_event(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Log a single audit event."""
        event_data = inputs["event_data"]
        tenant_id = inputs.get("tenant_id", "default")

        # Validate required event fields
        required_fields = ["event_type", "action", "description"]
        for field in required_fields:
            if field not in event_data:
                raise NodeValidationError(f"Missing required field: {field}")

        # Create audit event
        event_id = self._generate_event_id()
        now = datetime.now(UTC)

        audit_event = AuditEvent(
            event_id=event_id,
            event_type=AuditEventType(event_data["event_type"]),
            severity=AuditSeverity(event_data.get("severity", "medium")),
            user_id=event_data.get("user_id"),
            tenant_id=tenant_id,
            resource_id=event_data.get("resource_id"),
            action=event_data["action"],
            description=event_data["description"],
            metadata=event_data.get("metadata", {}),
            timestamp=now,
            ip_address=event_data.get("ip_address"),
            user_agent=event_data.get("user_agent"),
            session_id=event_data.get("session_id"),
            correlation_id=event_data.get("correlation_id"),
        )

        # Insert into database; parameter order must match the $1..$14
        # placeholders below
        insert_query = """
            INSERT INTO audit_logs (
                event_id, event_type, severity, user_id, tenant_id, resource_id,
                action, description, metadata, timestamp, ip_address, user_agent,
                session_id, correlation_id
            ) VALUES (
                $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14
            )
        """

        self._db_node.config.update(
            {
                "query": insert_query,
                "params": [
                    audit_event.event_id,
                    audit_event.event_type.value,
                    audit_event.severity.value,
                    audit_event.user_id,
                    audit_event.tenant_id,
                    audit_event.resource_id,
                    audit_event.action,
                    audit_event.description,
                    json.dumps(audit_event.metadata),  # serialize for the JSON column
                    audit_event.timestamp,
                    audit_event.ip_address,
                    audit_event.user_agent,
                    audit_event.session_id,
                    audit_event.correlation_id,
                ],
            }
        )

        self._db_node.run()

        return {
            "result": {
                "event": audit_event.to_dict(),
                "logged": True,
                "operation": "log_event",
                "timestamp": datetime.now(UTC).isoformat(),
            }
        }

    def _log_batch(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Log multiple audit events in batch."""
        events = inputs["events"]
        tenant_id = inputs.get("tenant_id", "default")

        if not isinstance(events, list):
            raise NodeValidationError("events must be a list for batch operations")

        results = {"logged": [], "failed": [], "stats": {"logged": 0, "failed": 0}}

        for i, event_data in enumerate(events):
            try:
                # Log each event individually
                log_inputs = {
                    "operation": "log_event",
                    "event_data": event_data,
                    "tenant_id": tenant_id,
                }

                result = self._log_event(log_inputs)
                results["logged"].append(
                    {"index": i, "event": result["result"]["event"]}
                )
                results["stats"]["logged"] += 1

            except Exception as e:
                results["failed"].append(
                    {"index": i, "event_data": event_data, "error": str(e)}
                )
                results["stats"]["failed"] += 1

        return {
            "result": {
                "operation": "log_batch",
                "results": results,
                "timestamp": datetime.now(UTC).isoformat(),
            }
        }

    def _query_logs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Query audit logs with advanced filtering."""
        query_filters = inputs.get("query_filters", {})
        pagination = inputs.get(
            "pagination", {"page": 1, "size": 20, "sort": "timestamp"}
        )
        tenant_id = inputs.get("tenant_id", "default")

        # Build WHERE clause
        where_conditions = ["tenant_id = $1"]
        params = [tenant_id]
        param_count = 1

        # Apply filters
        if "event_types" in query_filters:
            param_count += 1
            event_types = query_filters["event_types"]
            placeholders = ",".join(
                ["$" + str(param_count + i) for i in range(len(event_types))]
            )
            where_conditions.append(f"event_type IN ({placeholders})")
            params.extend(event_types)
            param_count += len(event_types) - 1

        if "severity" in query_filters:
            param_count += 1
            where_conditions.append(f"severity = ${param_count}")
            params.append(query_filters["severity"])

        if "user_id" in query_filters:
            param_count += 1
            where_conditions.append(f"user_id = ${param_count}")
            params.append(query_filters["user_id"])

        if "resource_id" in query_filters:
            param_count += 1
            where_conditions.append(f"resource_id = ${param_count}")
            params.append(query_filters["resource_id"])

        # Date range filter
        if "date_range" in query_filters:
            date_range = query_filters["date_range"]
            if "start" in date_range:
                param_count += 1
                where_conditions.append(f"timestamp >= ${param_count}")
                params.append(
                    datetime.fromisoformat(date_range["start"].replace("Z", "+00:00"))
                )

            if "end" in date_range:
                param_count += 1
                where_conditions.append(f"timestamp <= ${param_count}")
                params.append(
                    datetime.fromisoformat(date_range["end"].replace("Z", "+00:00"))
                )

        # Pagination. ORDER BY, LIMIT, and OFFSET cannot be parameterized,
        # so validate them before interpolation to prevent SQL injection.
        page = max(1, int(pagination.get("page", 1)))
        size = max(1, int(pagination.get("size", 20)))
        sort_field = pagination.get("sort", "timestamp")
        if sort_field not in {"timestamp", "event_type", "severity", "user_id"}:
            sort_field = "timestamp"
        sort_direction = (
            "ASC" if str(pagination.get("direction", "DESC")).upper() == "ASC" else "DESC"
        )

        offset = (page - 1) * size

        # Count query
        count_query = f"""
            SELECT COUNT(*) as total
            FROM audit_logs
            WHERE {' AND '.join(where_conditions)}
        """

        # Data query
        data_query = f"""
            SELECT event_id, event_type, severity, user_id, resource_id, action,
                   description, metadata, timestamp, ip_address, correlation_id
            FROM audit_logs
            WHERE {' AND '.join(where_conditions)}
            ORDER BY {sort_field} {sort_direction}
            LIMIT {size} OFFSET {offset}
        """

        # Execute count query
        self._db_node.config.update(
            {"query": count_query, "params": params, "fetch_mode": "one"}
        )
        count_result = self._db_node.run()
        total_count = count_result["result"]["data"]["total"]

        # Execute data query
        self._db_node.config.update(
            {"query": data_query, "params": params, "fetch_mode": "all"}
        )
        data_result = self._db_node.run()
        logs = data_result["result"]["data"]

        # Calculate pagination info
        total_pages = (total_count + size - 1) // size
        has_next = page < total_pages
        has_prev = page > 1

        return {
            "result": {
                "logs": logs,
                "pagination": {
                    "page": page,
                    "size": size,
                    "total": total_count,
                    "total_pages": total_pages,
                    "has_next": has_next,
                    "has_prev": has_prev,
                },
                "filters_applied": query_filters,
                "operation": "query_logs",
                "timestamp": datetime.now(UTC).isoformat(),
            }
        }

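A worked sketch of the placeholder bookkeeping and pagination arithmetic above (illustrative numbers, not part of the published file):

event_types = ["user_login", "permission_denied"]
param_count = 1 + 1  # $1 is tenant_id; the IN-list starts at $2
placeholders = ",".join("$" + str(param_count + i) for i in range(len(event_types)))
assert placeholders == "$2,$3"   # WHERE ... AND event_type IN ($2,$3)
assert (95 + 20 - 1) // 20 == 5  # total_pages for 95 rows at size 20
assert (5 - 1) * 20 == 80        # OFFSET for page 5: rows 81-95 are returned
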
    def _get_security_events(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Get security-specific audit events."""
        # Security event types
        security_event_types = [
            AuditEventType.SECURITY_VIOLATION.value,
            AuditEventType.PERMISSION_DENIED.value,
            AuditEventType.USER_LOGIN.value,
            AuditEventType.USER_LOGOUT.value,
            AuditEventType.PASSWORD_CHANGED.value,
            AuditEventType.PASSWORD_RESET.value,
        ]

        # Add the security event filter (copy so the caller's dict is not mutated)
        query_filters = dict(inputs.get("query_filters", {}))
        query_filters["event_types"] = security_event_types

        # Delegate to query_logs with the security filters
        security_inputs = inputs.copy()
        security_inputs["query_filters"] = query_filters

        result = self._query_logs(security_inputs)
        result["result"]["operation"] = "get_security_events"

        return result

    def _get_user_activity(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Get activity logs for a specific user."""
        user_id = inputs["user_id"]

        # Add the user filter (copy so the caller's dict is not mutated)
        query_filters = dict(inputs.get("query_filters", {}))
        query_filters["user_id"] = user_id

        # Delegate to query_logs with the user filter
        user_inputs = inputs.copy()
        user_inputs["query_filters"] = query_filters

        result = self._query_logs(user_inputs)
        result["result"]["operation"] = "get_user_activity"
        result["result"]["user_id"] = user_id

        return result

    def _generate_report(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Generate compliance and audit reports."""
        query_filters = inputs.get("query_filters", {})
        export_format = inputs.get("export_format", "json")
        tenant_id = inputs.get("tenant_id", "default")

        # Get audit logs based on filters
        query_inputs = {
            "query_filters": query_filters,
            "pagination": {"page": 1, "size": 10000},  # large page size for reports
            "tenant_id": tenant_id,
        }

        logs_result = self._query_logs(query_inputs)
        logs = logs_result["result"]["logs"]

        # Generate statistics
        stats = self._calculate_log_statistics(logs)

        # Build report
        report = {
            "report_id": self._generate_event_id(),
            "generated_at": datetime.now(UTC).isoformat(),
            "tenant_id": tenant_id,
            "filters": query_filters,
            "statistics": stats,
            "total_events": len(logs),
            "format": export_format,
        }

        if export_format == "json":
            report["events"] = logs
        elif export_format == "csv":
            report["csv_data"] = self._convert_to_csv(logs)
        elif export_format == "pdf":
            report["pdf_url"] = f"/reports/{report['report_id']}.pdf"

        return {
            "result": {
                "report": report,
                "operation": "generate_report",
                "timestamp": datetime.now(UTC).isoformat(),
            }
        }

    def _calculate_log_statistics(self, logs: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Calculate statistics from audit logs."""
        stats = {
            "event_types": {},
            "severities": {},
            "users": {},
            "daily_counts": {},
            "hourly_distribution": [0] * 24,
        }

        for log in logs:
            # Event type distribution
            event_type = log["event_type"]
            stats["event_types"][event_type] = (
                stats["event_types"].get(event_type, 0) + 1
            )

            # Severity distribution
            severity = log["severity"]
            stats["severities"][severity] = stats["severities"].get(severity, 0) + 1

            # User activity
            user_id = log.get("user_id")
            if user_id:
                stats["users"][user_id] = stats["users"].get(user_id, 0) + 1

            # Daily counts
            if log["timestamp"]:
                date_str = log["timestamp"][:10]  # extract the date part
                stats["daily_counts"][date_str] = (
                    stats["daily_counts"].get(date_str, 0) + 1
                )

                # Hourly distribution
                try:
                    hour = datetime.fromisoformat(
                        log["timestamp"].replace("Z", "+00:00")
                    ).hour
                    stats["hourly_distribution"][hour] += 1
                except (ValueError, AttributeError):
                    # Skip malformed timestamps rather than failing the report
                    pass

        return stats

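A small self-check of the aggregation above (illustrative rows; assumes the constructor usage shown in the class docstring):

rows = [
    {"event_type": "user_login", "severity": "medium", "user_id": "u1",
     "timestamp": "2025-06-01T09:15:00+00:00"},
    {"event_type": "user_login", "severity": "high", "user_id": "u1",
     "timestamp": "2025-06-01T17:40:00+00:00"},
]
stats = AuditLogNode(operation="get_statistics")._calculate_log_statistics(rows)
assert stats["event_types"] == {"user_login": 2}
assert stats["daily_counts"] == {"2025-06-01": 2}
assert stats["hourly_distribution"][9] == 1 and stats["hourly_distribution"][17] == 1
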
    def _convert_to_csv(self, logs: List[Dict[str, Any]]) -> str:
        """Convert logs to CSV format."""
        if not logs:
            return ""

        # CSV headers
        headers = [
            "event_id",
            "event_type",
            "severity",
            "user_id",
            "action",
            "timestamp",
        ]
        csv_lines = [",".join(headers)]

        # CSV data
        for log in logs:
            row = []
            for header in headers:
                value = log.get(header, "")
                if value is None:
                    value = ""
                # Escape embedded quotes, then quote any field containing
                # commas, quotes, or newlines
                value_str = str(value).replace('"', '""')
                if any(ch in value_str for ch in (',', '"', '\n')):
                    value_str = f'"{value_str}"'
                row.append(value_str)
            csv_lines.append(",".join(row))

        return "\n".join(csv_lines)

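A doctest-style sketch of the escaping rules (hypothetical row; assumes the constructor usage shown in the class docstring):

>>> node = AuditLogNode(operation="export_logs")
>>> print(node._convert_to_csv([{
...     "event_id": "e-1", "event_type": "custom", "severity": "low",
...     "user_id": None, "action": 'said "hi", twice',
...     "timestamp": "2025-06-01T00:00:00+00:00",
... }]).splitlines()[1])
e-1,custom,low,,"said ""hi"", twice",2025-06-01T00:00:00+00:00
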
    def _generate_event_id(self) -> str:
        """Generate a unique event ID."""
        return str(uuid.uuid4())

    # The remaining operations follow the same patterns and are not yet implemented.
    def _export_logs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Export audit logs in various formats."""
        raise NotImplementedError("export_logs is not yet implemented")

    def _archive_logs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Archive old audit logs for long-term storage."""
        raise NotImplementedError("archive_logs is not yet implemented")

    def _delete_logs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Delete old audit logs based on retention policy."""
        raise NotImplementedError("delete_logs is not yet implemented")

    def _get_statistics(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Get audit log statistics and metrics."""
        raise NotImplementedError("get_statistics is not yet implemented")

    def _monitor_realtime(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Monitor audit logs in real-time."""
        raise NotImplementedError("monitor_realtime is not yet implemented")