kailash 0.3.2__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +283 -10
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.2.dist-info/RECORD +0 -136
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
kailash/runtime/testing.py
CHANGED
@@ -127,6 +127,214 @@ class TestDataGenerator:
         return "\n".join(text_lines)
 
 
+class CredentialMockData:
+    """Generate mock credential data for testing authentication flows."""
+
+    @staticmethod
+    def generate_oauth2_config(provider: str = "generic") -> dict[str, Any]:
+        """Generate OAuth2 configuration for testing."""
+        configs = {
+            "generic": {
+                "token_url": "https://auth.example.com/oauth/token",
+                "client_id": "test_client_id_123",
+                "client_secret": "test_client_secret_456",
+                "scope": "read write",
+                "grant_type": "client_credentials",
+            },
+            "github": {
+                "token_url": "https://github.com/login/oauth/access_token",
+                "client_id": "test_github_client",
+                "client_secret": "test_github_secret",
+                "scope": "repo user",
+                "grant_type": "authorization_code",
+            },
+            "google": {
+                "token_url": "https://oauth2.googleapis.com/token",
+                "client_id": "test_google_client.apps.googleusercontent.com",
+                "client_secret": "test_google_secret",
+                "scope": "https://www.googleapis.com/auth/userinfo.email",
+                "grant_type": "authorization_code",
+            },
+        }
+        return configs.get(provider, configs["generic"])
+
+    @staticmethod
+    def generate_api_key_config(service: str = "generic") -> dict[str, Any]:
+        """Generate API key configuration for testing."""
+        configs = {
+            "generic": {
+                "api_key": "sk_test_4eC39HqLyjWDarjtT1zdp7dc",
+                "header_name": "X-API-Key",
+                "prefix": None,
+            },
+            "stripe": {
+                "api_key": "sk_test_4eC39HqLyjWDarjtT1zdp7dc",
+                "header_name": "Authorization",
+                "prefix": "Bearer",
+            },
+            "openai": {
+                "api_key": "sk-test-1234567890abcdef",
+                "header_name": "Authorization",
+                "prefix": "Bearer",
+            },
+        }
+        return configs.get(service, configs["generic"])
+
+    @staticmethod
+    def generate_jwt_claims(user_type: str = "user") -> dict[str, Any]:
+        """Generate JWT claims for testing."""
+        import time
+
+        now = int(time.time())
+        claims = {
+            "user": {
+                "sub": "1234567890",
+                "name": "Test User",
+                "email": "test@example.com",
+                "iat": now,
+                "exp": now + 3600,
+                "iss": "test_issuer",
+                "aud": "test_audience",
+                "roles": ["user"],
+            },
+            "admin": {
+                "sub": "admin_123",
+                "name": "Admin User",
+                "email": "admin@example.com",
+                "iat": now,
+                "exp": now + 3600,
+                "iss": "test_issuer",
+                "aud": "test_audience",
+                "roles": ["admin", "user"],
+                "permissions": ["read", "write", "delete"],
+            },
+            "service": {
+                "sub": "service_account_456",
+                "name": "Service Account",
+                "iat": now,
+                "exp": now + 86400,  # 24 hours
+                "iss": "test_issuer",
+                "aud": "test_audience",
+                "scope": "api:read api:write",
+            },
+        }
+        return claims.get(user_type, claims["user"])
+
+
+class SecurityTestHelper:
+    """Helper class for testing security and authentication flows."""
+
+    def __init__(self):
+        """Initialize security test helper."""
+        self.credential_mock = CredentialMockData()
+
+    def create_auth_test_workflow(self, auth_type: str = "oauth2") -> Workflow:
+        """Create a workflow for testing authentication."""
+        from kailash.nodes.api.auth import APIKeyNode, BasicAuthNode, OAuth2Node
+        from kailash.nodes.api.http import HTTPRequestNode
+        from kailash.nodes.testing import CredentialTestingNode
+
+        workflow = Workflow(
+            workflow_id=f"test_{auth_type}_auth", name=f"Test {auth_type} Auth"
+        )
+
+        if auth_type == "oauth2":
+            # Add OAuth2 testing nodes
+            workflow.add_node(
+                "credential_test",
+                CredentialTestingNode(),
+                credential_type="oauth2",
+                scenario="success",
+            )
+            workflow.add_node("oauth", OAuth2Node())
+            workflow.add_node("http", HTTPRequestNode())
+
+            # Connect nodes
+            workflow.connect("credential_test", "oauth", {"credentials": "mock_data"})
+            workflow.connect("oauth", "http", {"headers": "headers"})
+
+        elif auth_type == "api_key":
+            # Add API key testing nodes
+            workflow.add_node(
+                "credential_test",
+                CredentialTestingNode(),
+                credential_type="api_key",
+                scenario="success",
+            )
+            workflow.add_node("api_key", APIKeyNode())
+            workflow.add_node("http", HTTPRequestNode())
+
+            # Connect nodes
+            workflow.connect(
+                "credential_test", "api_key", {"credentials.api_key": "api_key"}
+            )
+            workflow.connect("api_key", "http", {"headers": "headers"})
+
+        elif auth_type == "basic":
+            # Add Basic auth testing nodes
+            workflow.add_node(
+                "credential_test",
+                CredentialTestingNode(),
+                credential_type="basic",
+                scenario="success",
+                mock_data={"username": "test_user", "password": "test_pass"},
+            )
+            workflow.add_node("basic", BasicAuthNode())
+            workflow.add_node("http", HTTPRequestNode())
+
+            # Connect nodes
+            workflow.connect(
+                "credential_test",
+                "basic",
+                {
+                    "credentials.username": "username",
+                    "credentials.password": "password",
+                },
+            )
+            workflow.connect("basic", "http", {"headers": "headers"})
+
+        return workflow
+
+    def test_credential_scenarios(
+        self, credential_type: str, scenarios: list[str] = None
+    ) -> dict[str, Any]:
+        """Test multiple credential scenarios and return results."""
+        from kailash.nodes.testing import CredentialTestingNode
+
+        if scenarios is None:
+            scenarios = ["success", "expired", "invalid", "rate_limit"]
+
+        results = {}
+        tester = CredentialTestingNode()
+
+        for scenario in scenarios:
+            try:
+                result = tester.run(
+                    credential_type=credential_type,
+                    scenario=scenario,
+                    mock_data=(
+                        getattr(
+                            self.credential_mock, f"generate_{credential_type}_config"
+                        )()
+                        if hasattr(
+                            self.credential_mock, f"generate_{credential_type}_config"
+                        )
+                        else {}
+                    ),
+                )
+                results[scenario] = {
+                    "success": result.get("valid", False),
+                    "result": result,
+                }
+            except Exception as e:
+                results[scenario] = {
+                    "success": False,
+                    "error": str(e),
+                }
+
+        return results
+
+
 class WorkflowTestHelper:
     """Helper class for testing workflows."""
 
@@ -137,7 +345,7 @@ class WorkflowTestHelper:
 
     def create_test_workflow(self, name: str = "test_workflow") -> Workflow:
         """Create a simple test workflow."""
-        workflow = Workflow(name=name)
+        workflow = Workflow(workflow_id=name, name=f"Test Workflow: {name}")
 
         # Add some mock nodes
         workflow.add_node("input", MockNode(), return_value={"data": [1, 2, 3]})
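
This one-line change reflects the Workflow constructor used throughout 0.4.0: a workflow_id plus a separate human-readable name, as also seen in SecurityTestHelper.create_auth_test_workflow above. Code written against the 0.3.2 style would need the same kind of update, roughly:

# Before (0.3.2 style)
workflow = Workflow(name="my_flow")

# After (0.4.0 style, as used in this diff)
workflow = Workflow(workflow_id="my_flow", name="My Flow")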
@@ -229,7 +437,7 @@ class NodeTestHelper:
         """Test node execution with given inputs."""
         if should_fail:
             try:
-                result = node.
+                result = node.execute(**inputs)
                 assert False, "Node execution should have failed but didn't"
             except (NodeValidationError, WorkflowExecutionError):
                 return {}
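
Taken together, the new testing helpers can be exercised along these lines. This is a minimal sketch, not from the diff itself, assuming the imports resolve as the additions to kailash/runtime/testing.py suggest; the result keys come from test_credential_scenarios above.

from kailash.runtime.testing import CredentialMockData, SecurityTestHelper

helper = SecurityTestHelper()

# Build a disposable workflow that exercises the OAuth2 auth path.
workflow = helper.create_auth_test_workflow(auth_type="oauth2")

# Run the mock credential scenarios and report which ones validate.
results = helper.test_credential_scenarios("oauth2")
for scenario, outcome in results.items():
    print(scenario, outcome["success"])

# The mock data can also be used standalone, e.g. admin JWT claims.
claims = CredentialMockData.generate_jwt_claims(user_type="admin")
assert "admin" in claims["roles"]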
kailash/utils/migrations/__init__.py
ADDED
@@ -0,0 +1,25 @@
+"""Database migration framework for Kailash SDK.
+
+This module provides a Django-inspired but async-first migration system
+for managing database schema changes across different database backends.
+
+Key Features:
+- Async-first design for non-blocking migrations
+- Support for PostgreSQL, MySQL, and SQLite
+- Forward and backward migrations
+- Dependency management between migrations
+- Dry-run capability
+- Migration history tracking
+- Schema versioning
+"""
+
+from kailash.utils.migrations.generator import MigrationGenerator
+from kailash.utils.migrations.models import Migration, MigrationHistory
+from kailash.utils.migrations.runner import MigrationRunner
+
+__all__ = [
+    "Migration",
+    "MigrationHistory",
+    "MigrationRunner",
+    "MigrationGenerator",
+]
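
The Migration base class itself lives in kailash/utils/migrations/models.py, which is not shown in this diff. Going by the templates the generator emits below, a hand-written migration would look roughly like this hypothetical sketch (table, column, and ID names are illustrative only):

from kailash.utils.migrations import Migration


class AddStatusColumn(Migration):
    """Add a status column to example_table."""

    id = "004_add_status_column"  # hypothetical ID, following the NNN_slug convention
    description = "Add a status column to example_table"
    dependencies = ["003_create_workflow_tables"]

    async def forward(self, connection):
        # Apply the schema change
        await connection.execute(
            "ALTER TABLE example_table ADD COLUMN status VARCHAR(50)"
        )

    async def backward(self, connection):
        # Undo the schema change
        await connection.execute(
            "ALTER TABLE example_table DROP COLUMN status"
        )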
kailash/utils/migrations/generator.py
ADDED
@@ -0,0 +1,433 @@
+"""Migration generator for creating migration files."""
+
+import os
+import re
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+
+class MigrationGenerator:
+    """Generates migration files from templates.
+
+    This class helps create new migration files with proper structure,
+    naming conventions, and boilerplate code.
+
+    Example:
+        >>> generator = MigrationGenerator("./migrations")
+        >>> generator.create_migration(
+        ...     name="add_user_table",
+        ...     description="Create user table with email and password"
+        ... )
+        Created migration: ./migrations/001_add_user_table.py
+    """
+
+    def __init__(self, migrations_dir: str = "./migrations"):
+        """Initialize generator.
+
+        Args:
+            migrations_dir: Directory to store migration files
+        """
+        self.migrations_dir = Path(migrations_dir)
+        self.migrations_dir.mkdir(parents=True, exist_ok=True)
+
+    def get_next_migration_number(self) -> str:
+        """Get next migration number based on existing files."""
+        existing_numbers = []
+
+        for file in self.migrations_dir.glob("*.py"):
+            match = re.match(r"^(\d+)_", file.name)
+            if match:
+                existing_numbers.append(int(match.group(1)))
+
+        next_number = max(existing_numbers, default=0) + 1
+        return f"{next_number:03d}"
+
+    def create_migration(
+        self,
+        name: str,
+        description: str,
+        migration_type: str = "schema",
+        dependencies: Optional[List[str]] = None,
+    ) -> str:
+        """Create a new migration file.
+
+        Args:
+            name: Migration name (will be slugified)
+            description: Human-readable description
+            migration_type: Type of migration (schema/data)
+            dependencies: List of migration IDs this depends on
+
+        Returns:
+            Path to created migration file
+        """
+        # Slugify name
+        slug = re.sub(r"[^a-z0-9]+", "_", name.lower()).strip("_")
+
+        # Get migration ID
+        number = self.get_next_migration_number()
+        migration_id = f"{number}_{slug}"
+        filename = f"{migration_id}.py"
+        filepath = self.migrations_dir / filename
+
+        # Generate content
+        if migration_type == "schema":
+            content = self._generate_schema_migration(
+                migration_id, description, dependencies
+            )
+        elif migration_type == "data":
+            content = self._generate_data_migration(
+                migration_id, description, dependencies
+            )
+        else:
+            content = self._generate_base_migration(
+                migration_id, description, dependencies
+            )
+
+        # Write file
+        filepath.write_text(content)
+        print(f"Created migration: {filepath}")
+
+        return str(filepath)
+
+    def _generate_base_migration(
+        self,
+        migration_id: str,
+        description: str,
+        dependencies: Optional[List[str]] = None,
+    ) -> str:
+        """Generate base migration template."""
+        deps = dependencies or []
+        deps_str = ", ".join(f'"{d}"' for d in deps)
+
+        return f'''"""
+{description}
+
+Generated on: {datetime.now().isoformat()}
+"""
+
+from kailash.utils.migrations import Migration
+
+
+class {self._class_name(migration_id)}(Migration):
+    """{description}"""
+
+    id = "{migration_id}"
+    description = "{description}"
+    dependencies = [{deps_str}]
+
+    async def forward(self, connection):
+        """Apply migration forward."""
+        # TODO: Implement forward migration
+        raise NotImplementedError("Forward migration not implemented")
+
+    async def backward(self, connection):
+        """Rollback migration."""
+        # TODO: Implement backward migration
+        raise NotImplementedError("Backward migration not implemented")
+
+    async def validate(self, connection):
+        """Validate migration can be applied."""
+        # Add any validation logic here
+        return True
+'''
+
+    def _generate_schema_migration(
+        self,
+        migration_id: str,
+        description: str,
+        dependencies: Optional[List[str]] = None,
+    ) -> str:
+        """Generate schema migration template."""
+        deps = dependencies or []
+        deps_str = ", ".join(f'"{d}"' for d in deps)
+
+        return f'''"""
+{description}
+
+Generated on: {datetime.now().isoformat()}
+"""
+
+from kailash.utils.migrations import Migration
+
+
+class {self._class_name(migration_id)}(Migration):
+    """{description}"""
+
+    id = "{migration_id}"
+    description = "{description}"
+    dependencies = [{deps_str}]
+
+    async def forward(self, connection):
+        """Apply migration forward."""
+        # Example: Create table
+        await connection.execute("""
+            CREATE TABLE IF NOT EXISTS example_table (
+                id SERIAL PRIMARY KEY,
+                name VARCHAR(255) NOT NULL,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """)
+
+        # Example: Add index
+        await connection.execute("""
+            CREATE INDEX idx_example_name ON example_table(name)
+        """)
+
+    async def backward(self, connection):
+        """Rollback migration."""
+        # Drop in reverse order
+        await connection.execute("DROP TABLE IF EXISTS example_table CASCADE")
+'''
+
+    def _generate_data_migration(
+        self,
+        migration_id: str,
+        description: str,
+        dependencies: Optional[List[str]] = None,
+    ) -> str:
+        """Generate data migration template."""
+        deps = dependencies or []
+        deps_str = ", ".join(f'"{d}"' for d in deps)
+
+        return f'''"""
+{description}
+
+Generated on: {datetime.now().isoformat()}
+"""
+
+from kailash.utils.migrations import DataMigration
+
+
+class {self._class_name(migration_id)}(DataMigration):
+    """{description}"""
+
+    id = "{migration_id}"
+    description = "{description}"
+    dependencies = [{deps_str}]
+    batch_size = 1000
+
+    async def forward(self, connection):
+        """Apply migration forward."""
+        # Example: Update data in batches
+        total_updated = 0
+
+        while True:
+            # Get batch of records to update
+            rows = await connection.fetch("""
+                SELECT id FROM example_table
+                WHERE needs_update = true
+                LIMIT $1
+            """, self.batch_size)
+
+            if not rows:
+                break
+
+            # Update batch
+            ids = [row["id"] for row in rows]
+            await connection.execute("""
+                UPDATE example_table
+                SET processed = true, updated_at = CURRENT_TIMESTAMP
+                WHERE id = ANY($1)
+            """, ids)
+
+            total_updated += len(rows)
+            print(f"Updated {{total_updated}} records...")
+
+        print(f"Migration complete. Updated {{total_updated}} total records.")
+
+    async def backward(self, connection):
+        """Rollback migration."""
+        # Reverse the data changes
+        await connection.execute("""
+            UPDATE example_table
+            SET processed = false
+            WHERE processed = true
+        """)
+'''
+
+    def _class_name(self, migration_id: str) -> str:
+        """Convert migration ID to class name."""
+        # Remove number prefix and convert to CamelCase
+        name_part = re.sub(r"^\d+_", "", migration_id)
+        parts = name_part.split("_")
+        return "".join(part.capitalize() for part in parts)
+
+    def create_initial_migrations(self) -> List[str]:
+        """Create initial system migrations."""
+        migrations = []
+
+        # Create users table migration
+        migrations.append(
+            self.create_migration(
+                name="create_users_table",
+                description="Create users table for authentication",
+                migration_type="schema",
+            )
+        )
+
+        # Create tenants table migration
+        migrations.append(
+            self.create_migration(
+                name="create_tenants_table",
+                description="Create tenants table for multi-tenancy",
+                migration_type="schema",
+                dependencies=["001_create_users_table"],
+            )
+        )
+
+        # Create workflow tracking tables
+        migrations.append(
+            self.create_migration(
+                name="create_workflow_tables",
+                description="Create workflow execution tracking tables",
+                migration_type="schema",
+                dependencies=["001_create_users_table", "002_create_tenants_table"],
+            )
+        )
+
+        return migrations
+
+    def generate_from_diff(
+        self,
+        current_schema: Dict[str, Any],
+        target_schema: Dict[str, Any],
+        name: str,
+        description: str,
+    ) -> str:
+        """Generate migration from schema difference.
+
+        Args:
+            current_schema: Current database schema
+            target_schema: Desired database schema
+            name: Migration name
+            description: Migration description
+
+        Returns:
+            Path to generated migration
+        """
+        # Analyze differences
+        operations = self._analyze_schema_diff(current_schema, target_schema)
+
+        # Generate migration with operations
+        number = self.get_next_migration_number()
+        migration_id = f"{number}_{re.sub(r'[^a-z0-9]+', '_', name.lower())}"
+
+        content = self._generate_diff_migration(migration_id, description, operations)
+
+        filename = f"{migration_id}.py"
+        filepath = self.migrations_dir / filename
+        filepath.write_text(content)
+
+        return str(filepath)
+
+    def _analyze_schema_diff(
+        self, current: Dict[str, Any], target: Dict[str, Any]
+    ) -> List[Dict[str, Any]]:
+        """Analyze schema differences."""
+        operations = []
+
+        current_tables = set(current.get("tables", {}).keys())
+        target_tables = set(target.get("tables", {}).keys())
+
+        # Find new tables
+        for table in target_tables - current_tables:
+            operations.append(
+                {
+                    "type": "create_table",
+                    "table": table,
+                    "definition": target["tables"][table],
+                }
+            )
+
+        # Find dropped tables
+        for table in current_tables - target_tables:
+            operations.append({"type": "drop_table", "table": table})
+
+        # Find modified tables
+        for table in current_tables & target_tables:
+            table_ops = self._analyze_table_diff(
+                table, current["tables"][table], target["tables"][table]
+            )
+            operations.extend(table_ops)
+
+        return operations
+
+    def _analyze_table_diff(
+        self, table_name: str, current: Dict[str, Any], target: Dict[str, Any]
+    ) -> List[Dict[str, Any]]:
+        """Analyze table differences."""
+        operations = []
+
+        current_columns = set(current.get("columns", {}).keys())
+        target_columns = set(target.get("columns", {}).keys())
+
+        # New columns
+        for col in target_columns - current_columns:
+            operations.append(
+                {
+                    "type": "add_column",
+                    "table": table_name,
+                    "column": col,
+                    "definition": target["columns"][col],
+                }
+            )
+
+        # Dropped columns
+        for col in current_columns - target_columns:
+            operations.append(
+                {"type": "drop_column", "table": table_name, "column": col}
+            )
+
+        return operations
+
+    def _generate_diff_migration(
+        self, migration_id: str, description: str, operations: List[Dict[str, Any]]
+    ) -> str:
+        """Generate migration from operations."""
+        forward_ops = []
+        backward_ops = []
+
+        for op in operations:
+            if op["type"] == "create_table":
+                forward_ops.append(
+                    f'await connection.execute("""{self._create_table_sql(op)}""")'
+                )
+                backward_ops.append(
+                    f'await connection.execute("DROP TABLE IF EXISTS {op["table"]} CASCADE")'
+                )
+            # Add more operation types as needed
+
+        forward_code = "\n        ".join(forward_ops) or "pass"
+        backward_code = "\n        ".join(reversed(backward_ops)) or "pass"
+
+        return f'''"""
+{description}
+
+Generated on: {datetime.now().isoformat()}
+Auto-generated from schema diff
+"""
+
+from kailash.utils.migrations import Migration
+
+
+class {self._class_name(migration_id)}(Migration):
+    """{description}"""
+
+    id = "{migration_id}"
+    description = "{description}"
+    dependencies = []
+
+    async def forward(self, connection):
+        """Apply migration forward."""
+        {forward_code}
+
+    async def backward(self, connection):
+        """Rollback migration."""
+        {backward_code}
+'''
+
+    def _create_table_sql(self, operation: Dict[str, Any]) -> str:
+        """Generate CREATE TABLE SQL."""
+        # Simplified example
+        return f"CREATE TABLE {operation['table']} (id SERIAL PRIMARY KEY)"