kailash 0.3.2__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +283 -10
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.2.dist-info/RECORD +0 -136
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
kailash/middleware/database/base_models.py (new file)
@@ -0,0 +1,525 @@
+"""
+Base abstract models for common entities.
+
+Applications extend these models for their specific needs.
+"""
+
+import uuid
+from datetime import datetime, timezone
+from typing import Any, Dict, List, Optional
+
+from sqlalchemy import (
+    JSON,
+    Boolean,
+    CheckConstraint,
+    Column,
+    DateTime,
+)
+from sqlalchemy import Enum as SQLEnum
+from sqlalchemy import (
+    Float,
+    ForeignKey,
+    Index,
+    Integer,
+    String,
+    Text,
+    UniqueConstraint,
+    func,
+)
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.orm import relationship, validates
+
+from .base import BaseMixin, ComplianceMixin, EnterpriseBaseMixin
+from .enums import (
+    ComplianceFramework,
+    ExecutionStatus,
+    NodeType,
+    SecurityEventType,
+    TemplateCategory,
+    WorkflowStatus,
+)
+from .models import Base
+
+
+class BaseWorkflowModel(Base, EnterpriseBaseMixin):
+    """Base workflow model with enterprise features."""
+
+    __abstract__ = True
+
+    # Core fields
+    workflow_id = Column(
+        String(255), primary_key=True, default=lambda: f"workflow_{uuid.uuid4().hex}"
+    )
+    name = Column(String(500), nullable=False)
+    description = Column(Text, default="")
+    status = Column(
+        SQLEnum(WorkflowStatus), nullable=False, default=WorkflowStatus.DRAFT
+    )
+
+    # Workflow definition
+    nodes = Column(JSON, default=list)
+    connections = Column(JSON, default=list)
+    workflow_metadata = Column(JSON, default=dict)
+
+    # Session tracking for middleware
+    session_id = Column(String(255), index=True)
+    owner_id = Column(String(255), index=True)
+
+    # Performance optimization
+    estimated_runtime_seconds = Column(Integer)
+    resource_requirements = Column(JSON, default=dict)
+    optimization_hints = Column(JSON, default=dict)
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(f"idx_{cls.__tablename__}_tenant_status", "tenant_id", "status"),
+            Index(f"idx_{cls.__tablename__}_owner_tenant", "owner_id", "tenant_id"),
+            Index(f"idx_{cls.__tablename__}_session", "session_id"),
+            Index(f"idx_{cls.__tablename__}_created", "created_at"),
+            CheckConstraint(
+                "version > 0", name=f"check_{cls.__tablename__}_version_positive"
+            ),
+            UniqueConstraint(
+                "tenant_id",
+                "name",
+                "version",
+                name=f"uq_{cls.__tablename__}_tenant_name_version",
+            ),
+        )
+
+    @validates("name")
+    def validate_name(self, key, name):
+        """Validate workflow name."""
+        if not name or len(name.strip()) < 3:
+            raise ValueError("Workflow name must be at least 3 characters")
+        if len(name) > 500:
+            raise ValueError("Workflow name must be less than 500 characters")
+        return name.strip()
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "workflow_id": self.workflow_id,
+            "name": self.name,
+            "description": self.description,
+            "status": self.status.value if self.status else None,
+            "version": self.version,
+            "tenant_id": self.tenant_id,
+            "owner_id": self.owner_id,
+            "session_id": self.session_id,
+            "nodes": self.nodes or [],
+            "connections": self.connections or [],
+            "metadata": self.workflow_metadata or {},
+            "security_classification": self.security_classification,
+            "compliance_requirements": self.compliance_requirements or [],
+            "created_at": self.created_at.isoformat() if self.created_at else None,
+            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
+        }
+
+
+class BaseExecutionModel(Base, BaseMixin):
+    """Base execution model with progress tracking."""
+
+    __abstract__ = True
+
+    # Core fields
+    execution_id = Column(
+        String(255), primary_key=True, default=lambda: f"exec_{uuid.uuid4().hex}"
+    )
+    workflow_id = Column(String(255), nullable=False, index=True)
+    status = Column(
+        SQLEnum(ExecutionStatus), nullable=False, default=ExecutionStatus.PENDING
+    )
+
+    # Progress tracking
+    total_nodes = Column(Integer, default=0)
+    completed_nodes = Column(Integer, default=0)
+    failed_nodes = Column(Integer, default=0)
+    current_node = Column(String(255))
+    progress_percentage = Column(Float, default=0.0)
+
+    # Data
+    inputs = Column(JSON, default=dict)
+    outputs = Column(JSON, default=dict)
+    intermediate_results = Column(JSON, default=dict)
+
+    # Error handling
+    error_message = Column(Text)
+    error_details = Column(JSON, default=dict)
+    retry_count = Column(Integer, default=0)
+
+    # Performance
+    started_at = Column(DateTime(timezone=True))
+    completed_at = Column(DateTime(timezone=True))
+    runtime_seconds = Column(Float)
+    resource_usage = Column(JSON, default=dict)
+
+    # Context
+    started_by = Column(String(255))
+    execution_context = Column(JSON, default=dict)
+
+    # Logging
+    logs = Column(JSON, default=list)
+    debug_info = Column(JSON, default=dict)
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(f"idx_{cls.__tablename__}_workflow", "workflow_id"),
+            Index(f"idx_{cls.__tablename__}_tenant_status", "tenant_id", "status"),
+            Index(f"idx_{cls.__tablename__}_started_by", "started_by"),
+            Index(f"idx_{cls.__tablename__}_started_at", "started_at"),
+            CheckConstraint(
+                "progress_percentage >= 0 AND progress_percentage <= 100",
+                name=f"check_{cls.__tablename__}_progress_range",
+            ),
+            CheckConstraint(
+                "completed_nodes >= 0",
+                name=f"check_{cls.__tablename__}_completed_positive",
+            ),
+            CheckConstraint(
+                "failed_nodes >= 0", name=f"check_{cls.__tablename__}_failed_positive"
+            ),
+        )
+
+    def start(self, started_by: str = None):
+        """Mark execution as started."""
+        self.status = ExecutionStatus.RUNNING
+        self.started_at = datetime.now(timezone.utc)
+        self.started_by = started_by
+
+    def complete(self, outputs: Dict[str, Any] = None):
+        """Mark execution as completed."""
+        self.status = ExecutionStatus.COMPLETED
+        self.completed_at = datetime.now(timezone.utc)
+        if outputs:
+            self.outputs = outputs
+        if self.started_at:
+            self.runtime_seconds = (self.completed_at - self.started_at).total_seconds()
+        self.progress_percentage = 100.0
+
+    def fail(self, error_message: str, error_details: Dict[str, Any] = None):
+        """Mark execution as failed."""
+        self.status = ExecutionStatus.FAILED
+        self.completed_at = datetime.now(timezone.utc)
+        self.error_message = error_message
+        if error_details:
+            self.error_details = error_details
+        if self.started_at:
+            self.runtime_seconds = (self.completed_at - self.started_at).total_seconds()
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "execution_id": self.execution_id,
+            "workflow_id": self.workflow_id,
+            "status": self.status.value if self.status else None,
+            "progress": {
+                "total_nodes": self.total_nodes,
+                "completed_nodes": self.completed_nodes,
+                "failed_nodes": self.failed_nodes,
+                "current_node": self.current_node,
+                "progress_percentage": self.progress_percentage,
+            },
+            "inputs": self.inputs or {},
+            "outputs": self.outputs or {},
+            "error_message": self.error_message,
+            "error_details": self.error_details or {},
+            "started_at": self.started_at.isoformat() if self.started_at else None,
+            "completed_at": (
+                self.completed_at.isoformat() if self.completed_at else None
+            ),
+            "runtime_seconds": self.runtime_seconds,
+            "tenant_id": self.tenant_id,
+            "started_by": self.started_by,
+            "created_at": self.created_at.isoformat() if self.created_at else None,
+        }
+
+
+class BaseTemplateModel(Base, BaseMixin):
+    """Base template model with analytics."""
+
+    __abstract__ = True
+
+    # Core fields
+    template_id = Column(
+        String(255), primary_key=True, default=lambda: f"template_{uuid.uuid4().hex}"
+    )
+    name = Column(String(500), nullable=False)
+    description = Column(Text, default="")
+    category = Column(SQLEnum(TemplateCategory), nullable=False)
+
+    # Organization
+    tags = Column(JSON, default=list)
+    industry = Column(String(100))
+    difficulty_level = Column(String(50), default="intermediate")
+
+    # Template definition
+    workflow_definition = Column(JSON, nullable=False)
+    preview_image = Column(String(1000))
+    documentation = Column(Text)
+
+    # Analytics
+    usage_count = Column(Integer, default=0)
+    rating_average = Column(Float, default=0.0)
+    rating_count = Column(Integer, default=0)
+    last_used = Column(DateTime(timezone=True))
+
+    # Enterprise
+    is_certified = Column(Boolean, default=False)
+    certification_level = Column(String(50))
+    compliance_frameworks = Column(JSON, default=list)
+    security_requirements = Column(JSON, default=dict)
+
+    # Visibility
+    is_public = Column(Boolean, default=False)
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(f"idx_{cls.__tablename__}_category", "category"),
+            Index(f"idx_{cls.__tablename__}_tenant_public", "tenant_id", "is_public"),
+            Index(f"idx_{cls.__tablename__}_certified", "is_certified"),
+            Index(f"idx_{cls.__tablename__}_usage", "usage_count"),
+            CheckConstraint(
+                "usage_count >= 0", name=f"check_{cls.__tablename__}_usage_positive"
+            ),
+            CheckConstraint(
+                "rating_average >= 0 AND rating_average <= 5",
+                name=f"check_{cls.__tablename__}_rating_range",
+            ),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "template_id": self.template_id,
+            "name": self.name,
+            "description": self.description,
+            "category": self.category.value if self.category else None,
+            "tags": self.tags or [],
+            "difficulty_level": self.difficulty_level,
+            "workflow_definition": self.workflow_definition or {},
+            "usage_count": self.usage_count,
+            "rating_average": self.rating_average,
+            "is_certified": self.is_certified,
+            "tenant_id": self.tenant_id,
+            "is_public": self.is_public,
+            "created_at": self.created_at.isoformat() if self.created_at else None,
+        }
+
+
+class BaseSecurityEventModel(Base, BaseMixin, ComplianceMixin):
+    """Base security event model for monitoring."""
+
+    __abstract__ = True
+
+    # Core fields
+    event_id = Column(
+        String(255), primary_key=True, default=lambda: f"sec_event_{uuid.uuid4().hex}"
+    )
+    event_type = Column(SQLEnum(SecurityEventType), nullable=False)
+    severity = Column(String(50), nullable=False, default="info")
+    description = Column(Text)
+
+    # Associated resources
+    workflow_id = Column(String(255), index=True)
+    execution_id = Column(String(255), index=True)
+    resource_type = Column(String(100))
+    resource_id = Column(String(255))
+
+    # User context
+    user_id = Column(String(255), index=True)
+    session_id = Column(String(255), index=True)
+    ip_address = Column(String(45))
+    user_agent = Column(Text)
+    geographic_location = Column(JSON)
+
+    # Event data
+    event_data = Column(JSON, default=dict)
+    threat_indicators = Column(JSON, default=dict)
+    response_actions = Column(JSON, default=list)
+
+    # Detection
+    detection_method = Column(String(100))
+    confidence_score = Column(Float)
+    false_positive = Column(Boolean, default=False)
+
+    # Timing
+    occurred_at = Column(DateTime(timezone=True), nullable=False, default=func.now())
+    detected_at = Column(DateTime(timezone=True), nullable=False, default=func.now())
+    resolved_at = Column(DateTime(timezone=True))
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(f"idx_{cls.__tablename__}_type_severity", "event_type", "severity"),
+            Index(
+                f"idx_{cls.__tablename__}_tenant_occurred", "tenant_id", "occurred_at"
+            ),
+            Index(f"idx_{cls.__tablename__}_user", "user_id"),
+            Index(f"idx_{cls.__tablename__}_session", "session_id"),
+            CheckConstraint(
+                "confidence_score >= 0 AND confidence_score <= 1",
+                name=f"check_{cls.__tablename__}_confidence_range",
+            ),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "event_id": self.event_id,
+            "event_type": self.event_type.value if self.event_type else None,
+            "severity": self.severity,
+            "description": self.description,
+            "user_id": self.user_id,
+            "session_id": self.session_id,
+            "tenant_id": self.tenant_id,
+            "event_data": self.event_data or {},
+            "occurred_at": self.occurred_at.isoformat() if self.occurred_at else None,
+        }
+
+
+class BaseAuditLogModel(Base, BaseMixin, ComplianceMixin):
+    """Base audit log model for compliance."""
+
+    __abstract__ = True
+
+    # Core fields
+    audit_id = Column(
+        String(255), primary_key=True, default=lambda: f"audit_{uuid.uuid4().hex}"
+    )
+    action = Column(String(100), nullable=False)
+    resource_type = Column(String(100), nullable=False)
+    resource_id = Column(String(255))
+    workflow_id = Column(String(255), index=True)
+
+    # User context
+    user_id = Column(String(255), index=True)
+    user_email = Column(String(255))
+    user_roles = Column(JSON, default=list)
+    session_id = Column(String(255), index=True)
+
+    # Change tracking
+    old_values = Column(JSON)
+    new_values = Column(JSON)
+    changes = Column(JSON)
+
+    # Request details
+    ip_address = Column(String(45))
+    user_agent = Column(Text)
+    request_id = Column(String(255))
+    api_endpoint = Column(String(500))
+    http_method = Column(String(10))
+
+    # Result
+    success = Column(Boolean, nullable=False)
+    error_message = Column(Text)
+    response_code = Column(Integer)
+
+    # Data classification
+    data_classification = Column(String(50))
+
+    # Timing
+    timestamp = Column(DateTime(timezone=True), nullable=False, default=func.now())
+    duration_ms = Column(Integer)
+
+    # Integrity
+    checksum = Column(String(512))
+    signature = Column(Text)
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(
+                f"idx_{cls.__tablename__}_action_resource", "action", "resource_type"
+            ),
+            Index(
+                f"idx_{cls.__tablename__}_tenant_timestamp", "tenant_id", "timestamp"
+            ),
+            Index(f"idx_{cls.__tablename__}_user_timestamp", "user_id", "timestamp"),
+            Index(f"idx_{cls.__tablename__}_session", "session_id"),
+            Index(f"idx_{cls.__tablename__}_success", "success"),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "audit_id": self.audit_id,
+            "action": self.action,
+            "resource_type": self.resource_type,
+            "resource_id": self.resource_id,
+            "user_id": self.user_id,
+            "session_id": self.session_id,
+            "tenant_id": self.tenant_id,
+            "success": self.success,
+            "timestamp": self.timestamp.isoformat() if self.timestamp else None,
+        }
+
+
+class BaseComplianceModel(Base, BaseMixin):
+    """Base compliance assessment model."""
+
+    __abstract__ = True
+
+    # Core fields
+    assessment_id = Column(
+        String(255), primary_key=True, default=lambda: f"compliance_{uuid.uuid4().hex}"
+    )
+    framework = Column(SQLEnum(ComplianceFramework), nullable=False)
+
+    # Scope
+    resource_type = Column(String(100))
+    resource_id = Column(String(255))
+
+    # Results
+    overall_score = Column(Float, nullable=False)
+    violations = Column(JSON, default=list)
+    recommendations = Column(JSON, default=list)
+    evidence = Column(JSON, default=dict)
+
+    # Assessment details
+    assessment_date = Column(
+        DateTime(timezone=True), nullable=False, default=func.now()
+    )
+    assessor = Column(String(255))
+    assessment_method = Column(String(100))
+
+    # Remediation
+    remediation_plan = Column(JSON, default=dict)
+    remediation_deadline = Column(DateTime(timezone=True))
+    remediation_status = Column(String(50), default="pending")
+
+    # Approval
+    approved_by = Column(String(255))
+    approved_at = Column(DateTime(timezone=True))
+    certification_valid_until = Column(DateTime(timezone=True))
+
+    @declared_attr
+    def __table_args__(cls):
+        return (
+            Index(
+                f"idx_{cls.__tablename__}_framework_tenant", "framework", "tenant_id"
+            ),
+            Index(f"idx_{cls.__tablename__}_assessment_date", "assessment_date"),
+            Index(f"idx_{cls.__tablename__}_score", "overall_score"),
+            CheckConstraint(
+                "overall_score >= 0 AND overall_score <= 100",
+                name=f"check_{cls.__tablename__}_score_range",
+            ),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for API responses."""
+        return {
+            "assessment_id": self.assessment_id,
+            "framework": self.framework.value if self.framework else None,
+            "tenant_id": self.tenant_id,
+            "overall_score": self.overall_score,
+            "violations": self.violations or [],
+            "assessment_date": (
+                self.assessment_date.isoformat() if self.assessment_date else None
+            ),
+            "remediation_status": self.remediation_status,
+        }
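All five classes above set `__abstract__ = True`, so the module creates no tables itself: an application subclasses a base model, supplies a `__tablename__`, and the `@declared_attr` `__table_args__` then generates the per-table indexes and constraints automatically. A minimal sketch of that pattern, assuming the mixins from `base.py` (not shown in this diff) provide the `tenant_id`, `version`, and timestamp columns the base model references; the class name, table name, and extra column below are illustrative, not part of this release:

# Hypothetical application model derived from the abstract base above.
from sqlalchemy import Column, String

from kailash.middleware.database.base_models import BaseWorkflowModel


class MarketingWorkflow(BaseWorkflowModel):
    """Concrete workflow table for a single application."""

    __tablename__ = "marketing_workflows"

    # Application-specific field layered on top of the shared enterprise columns
    campaign_code = Column(String(100), index=True)


# The @validates("name") hook runs on assignment; column defaults such as the
# generated workflow_id and the DRAFT status are applied when the row is inserted.
wf = MarketingWorkflow(name="Quarterly newsletter", owner_id="user_42")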
kailash/middleware/database/enums.py (new file)
@@ -0,0 +1,106 @@
+"""
+Common enums for middleware database models.
+
+These enums are used across all Kailash applications to ensure consistency.
+"""
+
+from enum import Enum
+
+
+class WorkflowStatus(Enum):
+    """Workflow lifecycle status."""
+
+    DRAFT = "draft"
+    ACTIVE = "active"
+    ARCHIVED = "archived"
+    TEMPLATE = "template"
+    PUBLISHED = "published"
+    DEPRECATED = "deprecated"
+
+
+class ExecutionStatus(Enum):
+    """Workflow execution status."""
+
+    PENDING = "pending"
+    RUNNING = "running"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    CANCELLED = "cancelled"
+    PAUSED = "paused"
+    TIMEOUT = "timeout"
+
+
+class NodeType(Enum):
+    """Node category types from comprehensive catalog."""
+
+    AI_ML = "ai_ml"
+    DATA_PROCESSING = "data_processing"
+    API_INTEGRATION = "api_integration"
+    LOGIC_CONTROL = "logic_control"
+    TRANSFORM = "transform"
+    ADMIN_SECURITY = "admin_security"
+    ENTERPRISE = "enterprise"
+    TESTING = "testing"
+    CODE_EXECUTION = "code_execution"
+
+
+class TemplateCategory(Enum):
+    """Template categories for organization."""
+
+    BUSINESS = "business"
+    DATA_PROCESSING = "data_processing"
+    AI_ORCHESTRATION = "ai_orchestration"
+    API_INTEGRATION = "api_integration"
+    ENTERPRISE_AUTOMATION = "enterprise_automation"
+    QUALITY_ASSURANCE = "quality_assurance"
+    ADMIN_SECURITY = "admin_security"
+    COMPLIANCE = "compliance"
+
+
+class SecurityEventType(Enum):
+    """Security event types for comprehensive monitoring."""
+
+    AUTHENTICATION = "authentication"
+    AUTHORIZATION = "authorization"
+    DATA_ACCESS = "data_access"
+    EXPORT = "export"
+    THREAT_DETECTION = "threat_detection"
+    POLICY_VIOLATION = "policy_violation"
+    COMPLIANCE_CHECK = "compliance_check"
+    PERMISSION_CHANGE = "permission_change"
+    ACCOUNT_LOCK = "account_lock"
+    SESSION_ANOMALY = "session_anomaly"
+
+
+class ComplianceFramework(Enum):
+    """Supported compliance frameworks."""

+    GDPR = "gdpr"
+    SOC2 = "soc2"
+    ISO27001 = "iso27001"
+    HIPAA = "hipaa"
+    PCI_DSS = "pci_dss"
+
+
+class SecurityClassification(Enum):
+    """Data security classification levels."""
+
+    PUBLIC = "public"
+    INTERNAL = "internal"
+    CONFIDENTIAL = "confidential"
+    RESTRICTED = "restricted"
+
+
+class AuditAction(Enum):
+    """Audit log action types."""
+
+    CREATE = "create"
+    READ = "read"
+    UPDATE = "update"
+    DELETE = "delete"
+    EXECUTE = "execute"
+    EXPORT = "export"
+    IMPORT = "import"
+    SHARE = "share"
+    ARCHIVE = "archive"
+    RESTORE = "restore"
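These enum members back the `SQLEnum(...)` columns in the abstract models, and their `.value` strings are what the `to_dict()` helpers emit. A short lifecycle sketch using `ExecutionStatus` with the `start()`/`complete()`/`fail()` helpers from `BaseExecutionModel`; the concrete subclass is hypothetical, and nothing here touches a database:

# Hypothetical concrete execution model, shown only to illustrate the helpers.
from kailash.middleware.database.base_models import BaseExecutionModel
from kailash.middleware.database.enums import ExecutionStatus


class WorkflowExecution(BaseExecutionModel):
    __tablename__ = "workflow_executions"


run = WorkflowExecution(workflow_id="workflow_abc123", total_nodes=3)
run.start(started_by="user_42")             # status -> ExecutionStatus.RUNNING, started_at stamped
run.complete(outputs={"rows_written": 42})  # status -> COMPLETED, runtime_seconds and 100% progress set
assert run.status is ExecutionStatus.COMPLETED
assert run.to_dict()["status"] == "completed"

# On failure the same record would instead be finalized with:
#   run.fail("node X raised", error_details={"node": run.current_node})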