kailash 0.1.5__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +2 -0
  21. kailash/nodes/ai/a2a.py +714 -67
  22. kailash/nodes/ai/intelligent_agent_orchestrator.py +31 -37
  23. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  24. kailash/nodes/ai/llm_agent.py +324 -1
  25. kailash/nodes/ai/self_organizing.py +5 -6
  26. kailash/nodes/base.py +15 -2
  27. kailash/nodes/base_async.py +45 -0
  28. kailash/nodes/base_cycle_aware.py +374 -0
  29. kailash/nodes/base_with_acl.py +338 -0
  30. kailash/nodes/code/python.py +135 -27
  31. kailash/nodes/data/readers.py +16 -6
  32. kailash/nodes/data/writers.py +16 -6
  33. kailash/nodes/logic/__init__.py +8 -0
  34. kailash/nodes/logic/convergence.py +642 -0
  35. kailash/nodes/logic/loop.py +153 -0
  36. kailash/nodes/logic/operations.py +187 -27
  37. kailash/nodes/mixins/__init__.py +11 -0
  38. kailash/nodes/mixins/mcp.py +228 -0
  39. kailash/nodes/mixins.py +387 -0
  40. kailash/runtime/__init__.py +2 -1
  41. kailash/runtime/access_controlled.py +458 -0
  42. kailash/runtime/local.py +106 -33
  43. kailash/runtime/parallel_cyclic.py +529 -0
  44. kailash/sdk_exceptions.py +90 -5
  45. kailash/security.py +845 -0
  46. kailash/tracking/manager.py +38 -15
  47. kailash/tracking/models.py +1 -1
  48. kailash/tracking/storage/filesystem.py +30 -2
  49. kailash/utils/__init__.py +8 -0
  50. kailash/workflow/__init__.py +18 -0
  51. kailash/workflow/convergence.py +270 -0
  52. kailash/workflow/cycle_analyzer.py +768 -0
  53. kailash/workflow/cycle_builder.py +573 -0
  54. kailash/workflow/cycle_config.py +709 -0
  55. kailash/workflow/cycle_debugger.py +760 -0
  56. kailash/workflow/cycle_exceptions.py +601 -0
  57. kailash/workflow/cycle_profiler.py +671 -0
  58. kailash/workflow/cycle_state.py +338 -0
  59. kailash/workflow/cyclic_runner.py +985 -0
  60. kailash/workflow/graph.py +500 -39
  61. kailash/workflow/migration.py +768 -0
  62. kailash/workflow/safety.py +365 -0
  63. kailash/workflow/templates.py +744 -0
  64. kailash/workflow/validation.py +693 -0
  65. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/METADATA +256 -12
  66. kailash-0.2.0.dist-info/RECORD +125 -0
  67. kailash/nodes/mcp/__init__.py +0 -11
  68. kailash/nodes/mcp/client.py +0 -554
  69. kailash/nodes/mcp/resource.py +0 -682
  70. kailash/nodes/mcp/server.py +0 -577
  71. kailash-0.1.5.dist-info/RECORD +0 -88
  72. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/WHEEL +0 -0
  73. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/entry_points.txt +0 -0
  74. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/licenses/LICENSE +0 -0
  75. {kailash-0.1.5.dist-info → kailash-0.2.0.dist-info}/top_level.txt +0 -0
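Note: the single hunk reproduced below is a new file of 893 added lines; judging by the file list above it appears to be kailash/api/studio_secure.py (item 9), though this rendering omits the per-file diff header.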
@@ -0,0 +1,893 @@
+"""
+Kailash Workflow Studio API with JWT Authentication and Tenant Isolation
+
+This module provides REST API endpoints for the Workflow Studio frontend,
+with full JWT-based authentication and tenant isolation.
+
+Key Features:
+- JWT token-based authentication
+- Complete tenant data isolation
+- Role-based access control
+- Secure workflow execution
+- API key support for automation
+"""
+
+import asyncio
+import logging
+import os
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+import uvicorn
+from fastapi import (
+    Body,
+    Depends,
+    FastAPI,
+    HTTPException,
+    Query,
+    WebSocket,
+    WebSocketDisconnect,
+)
+from fastapi.middleware.cors import CORSMiddleware
+from pydantic import BaseModel, Field
+from sqlalchemy.orm import Session
+
+from kailash.nodes.base import NodeRegistry
+from kailash.runtime.local import LocalRuntime
+from kailash.tracking.manager import TaskManager
+from kailash.tracking.storage.filesystem import FileSystemStorage
+from kailash.workflow import Workflow
+
+from .auth import (
+    APIKey,
+    AuthService,
+    Tenant,
+    TenantContext,
+    TokenResponse,
+    User,
+    UserCreate,
+    UserLogin,
+    get_current_tenant,
+    get_current_user,
+    require_permission,
+)
+from .custom_nodes import setup_custom_node_routes
+from .database import (
+    CustomNodeRepository,
+    ExecutionRepository,
+    WorkflowRepository,
+    get_db_session,
+    init_database,
+)
+
+logger = logging.getLogger(__name__)
+
+
+# Pydantic models for API
+class NodeDefinition(BaseModel):
+    """Node definition for frontend consumption"""
+
+    id: str
+    category: str
+    name: str
+    description: str
+    parameters: List[Dict[str, Any]]
+    inputs: List[Dict[str, Any]]
+    outputs: List[Dict[str, Any]]
+
+
+class WorkflowCreate(BaseModel):
+    """Workflow creation request"""
+
+    name: str
+    description: Optional[str] = None
+    definition: Dict[str, Any]
+
+
+class WorkflowUpdate(BaseModel):
+    """Workflow update request"""
+
+    name: Optional[str] = None
+    description: Optional[str] = None
+    definition: Optional[Dict[str, Any]] = None
+
+
+class WorkflowResponse(BaseModel):
+    """Workflow response model"""
+
+    id: str
+    name: str
+    description: Optional[str]
+    definition: Dict[str, Any]
+    created_at: datetime
+    updated_at: datetime
+    created_by: Optional[str]
+    version: int
+
+
+class ExecutionRequest(BaseModel):
+    """Workflow execution request"""
+
+    parameters: Optional[Dict[str, Any]] = None
+
+
+class ExecutionResponse(BaseModel):
+    """Workflow execution response"""
+
+    id: str
+    workflow_id: str
+    status: str
+    started_at: datetime
+    completed_at: Optional[datetime]
+    result: Optional[Dict[str, Any]]
+    error: Optional[str]
+
+
+class WorkflowImportRequest(BaseModel):
+    """Workflow import request"""
+
+    name: str
+    description: Optional[str] = None
+    format: str = Field(..., pattern="^(yaml|json|python)$")
+    content: str
+
+
+class WorkflowImportResponse(BaseModel):
+    """Workflow import response"""
+
+    id: str
+    name: str
+    description: Optional[str]
+    definition: Dict[str, Any]
+    created_at: datetime
+    warnings: List[str] = []
+
+
+class WorkflowStudioAPI:
+    """Main API class for Workflow Studio with authentication"""
+
+    def __init__(self, db_path: str = None):
+        self.app = FastAPI(
+            title="Kailash Workflow Studio API",
+            version="2.0.0",
+            description="Secure multi-tenant workflow studio API",
+        )
+
+        # Initialize database
+        self.SessionLocal, self.engine = init_database(db_path)
+
+        self.setup_middleware()
+        self.setup_auth_routes()
+        self.setup_routes()
+        self.active_executions: Dict[str, asyncio.Task] = {}
+        self.websocket_connections: Dict[str, List[WebSocket]] = {}
+
+        # Register custom nodes on startup
+        self.app.add_event_handler("startup", self._register_custom_nodes)
+
+    async def _register_custom_nodes(self):
+        """Register custom nodes from database into NodeRegistry"""
+        try:
+            with self.SessionLocal() as session:
+                # Get all tenants
+                tenants = session.query(Tenant).filter(Tenant.is_active).all()
+
+                for tenant in tenants:
+                    node_repo = CustomNodeRepository(session)
+                    custom_nodes = node_repo.list(tenant.id)
+
+                    for node in custom_nodes:
+                        # Register node in NodeRegistry with tenant prefix
+                        # This would require dynamic node creation based on stored definition
+                        logger.info(
+                            f"Registered custom node: {tenant.slug}/{node.name}"
+                        )
+        except Exception as e:
+            logger.error(f"Error registering custom nodes: {e}")
+
+    def setup_middleware(self):
+        """Configure CORS and other middleware"""
+        origins = os.getenv("CORS_ORIGINS", "http://localhost:3000").split(",")
+
+        self.app.add_middleware(
+            CORSMiddleware,
+            allow_origins=origins,
+            allow_credentials=True,
+            allow_methods=["*"],
+            allow_headers=["*"],
+        )
+
+    def setup_auth_routes(self):
+        """Configure authentication routes"""
+
+        @self.app.post("/api/auth/register", response_model=TokenResponse)
+        async def register(
+            user_data: UserCreate, session: Session = Depends(get_db_session)
+        ):
+            """Register a new user"""
+            auth_service = AuthService(session)
+            user, tokens = auth_service.register_user(user_data)
+            return tokens
+
+        @self.app.post("/api/auth/login", response_model=TokenResponse)
+        async def login(
+            credentials: UserLogin, session: Session = Depends(get_db_session)
+        ):
+            """Login and get JWT tokens"""
+            auth_service = AuthService(session)
+            user, tokens = auth_service.login_user(credentials)
+            return tokens
+
+        @self.app.post("/api/auth/refresh", response_model=TokenResponse)
+        async def refresh_token(
+            refresh_token: str = Body(..., embed=True),
+            session: Session = Depends(get_db_session),
+        ):
+            """Refresh access token using refresh token"""
+            auth_service = AuthService(session)
+            return auth_service.refresh_token(refresh_token)
+
+        @self.app.get("/api/auth/me")
+        async def get_current_user_info(
+            user: User = Depends(get_current_user),
+            tenant: Tenant = Depends(get_current_tenant),
+        ):
+            """Get current user information"""
+            return {
+                "user": {
+                    "id": user.id,
+                    "email": user.email,
+                    "username": user.username,
+                    "roles": user.roles,
+                    "is_verified": user.is_verified,
+                },
+                "tenant": {
+                    "id": tenant.id,
+                    "name": tenant.name,
+                    "slug": tenant.slug,
+                    "subscription_tier": tenant.subscription_tier,
+                    "features": tenant.features,
+                },
+            }
+
+    def setup_routes(self):
+        """Configure API routes with authentication"""
+
+        # Setup custom node routes
+        setup_custom_node_routes(self.app, self.SessionLocal)
+
+        @self.app.get("/health")
+        async def health_check():
+            """Health check endpoint"""
+            return {"status": "healthy", "version": "2.0.0"}
+
+        # Node discovery endpoints
+        @self.app.get("/api/nodes", response_model=Dict[str, List[NodeDefinition]])
+        async def list_nodes(user: User = Depends(get_current_user)):
+            """List all available nodes grouped by category"""
+            # Filter nodes based on user permissions
+            registry = NodeRegistry.list_nodes()
+            nodes_by_category = {}
+
+            for node_id, node_class in registry.items():
+                # Skip nodes user doesn't have access to
+                if not self._can_access_node(user, node_id):
+                    continue
+
+                # Extract category from module path
+                module_parts = node_class.__module__.split(".")
+                if "nodes" in module_parts:
+                    idx = module_parts.index("nodes")
+                    if idx + 1 < len(module_parts):
+                        category = module_parts[idx + 1]
+                    else:
+                        category = "misc"
+                else:
+                    category = "misc"
+
+                # Get node parameters
+                try:
+                    params = node_class.get_parameters()
+                    param_list = [
+                        {
+                            "name": name,
+                            "type": str(
+                                param.type.__name__
+                                if hasattr(param.type, "__name__")
+                                else str(param.type)
+                            ),
+                            "required": param.required,
+                            "description": param.description,
+                            "default": param.default,
+                        }
+                        for name, param in params.items()
+                    ]
+                except Exception:
+                    param_list = []
+
+                # Extract input/output information
+                inputs = []
+                outputs = []
+
+                # Check if node has explicit input schema
+                if hasattr(node_class, "get_input_schema"):
+                    try:
+                        input_schema = node_class.get_input_schema()
+                        if isinstance(input_schema, dict):
+                            for key, schema in input_schema.items():
+                                inputs.append(
+                                    {
+                                        "name": key,
+                                        "type": schema.get("type", "any"),
+                                        "required": schema.get("required", True),
+                                    }
+                                )
+                    except Exception:
+                        pass
+
+                # If no explicit schema, infer from parameters
+                if not inputs:
+                    # Check if any parameters are marked as input sources
+                    for param_name, param in params.items():
+                        if hasattr(param, "source") and param.source == "input":
+                            inputs.append(
+                                {
+                                    "name": param_name,
+                                    "type": str(
+                                        param.type.__name__
+                                        if hasattr(param.type, "__name__")
+                                        else "any"
+                                    ),
+                                    "required": param.required,
+                                }
+                            )
+
+                # If still no inputs and node typically processes data, add default
+                if not inputs and any(
+                    keyword in node_class.__name__.lower()
+                    for keyword in ["process", "transform", "filter", "merge"]
+                ):
+                    inputs.append({"name": "data", "type": "any", "required": True})
+
+                # Extract output information
+                if hasattr(node_class, "get_output_schema"):
+                    try:
+                        output_schema = node_class.get_output_schema()
+                        outputs.append(
+                            {
+                                "name": "output",
+                                "type": (
+                                    "object"
+                                    if isinstance(output_schema, dict)
+                                    else "any"
+                                ),
+                                "schema": (
+                                    output_schema
+                                    if isinstance(output_schema, dict)
+                                    else None
+                                ),
+                            }
+                        )
+                    except:
+                        outputs.append({"name": "output", "type": "any"})
+                else:
+                    # Default output for all nodes
+                    outputs.append({"name": "output", "type": "any"})
+
+                # Create node definition
+                node_def = NodeDefinition(
+                    id=node_id,
+                    category=category,
+                    name=node_class.__name__,
+                    description=node_class.__doc__ or "No description available",
+                    parameters=param_list,
+                    inputs=inputs,
+                    outputs=outputs,
+                )
+
+                if category not in nodes_by_category:
+                    nodes_by_category[category] = []
+                nodes_by_category[category].append(node_def)
+
+            return nodes_by_category
+
+        # Workflow management endpoints with tenant isolation
+        @self.app.get("/api/workflows", response_model=List[WorkflowResponse])
+        async def list_workflows(
+            limit: int = Query(100, ge=1, le=1000),
+            offset: int = Query(0, ge=0),
+            user: User = Depends(require_permission("read:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """List all workflows for the tenant"""
+            repo = WorkflowRepository(session)
+            workflows = repo.list(tenant.id, limit=limit, offset=offset)
+
+            return [
+                WorkflowResponse(
+                    id=w.id,
+                    name=w.name,
+                    description=w.description,
+                    definition=w.definition,
+                    created_at=w.created_at,
+                    updated_at=w.updated_at,
+                    created_by=w.created_by,
+                    version=w.version,
+                )
+                for w in workflows
+            ]
+
+        @self.app.post("/api/workflows", response_model=WorkflowResponse)
+        async def create_workflow(
+            workflow: WorkflowCreate,
+            user: User = Depends(require_permission("write:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Create a new workflow"""
+            # Check workflow limit
+            if tenant.max_workflows["current"] >= tenant.max_workflows["limit"]:
+                raise HTTPException(
+                    status_code=403, detail="Workflow limit reached for tenant"
+                )
+
+            repo = WorkflowRepository(session)
+            workflow_model = repo.create(
+                tenant_id=tenant.id,
+                name=workflow.name,
+                description=workflow.description,
+                definition=workflow.definition,
+                created_by=user.email,
+            )
+
+            # Update tenant workflow count
+            tenant.max_workflows["current"] += 1
+            session.commit()
+
+            return WorkflowResponse(
+                id=workflow_model.id,
+                name=workflow_model.name,
+                description=workflow_model.description,
+                definition=workflow_model.definition,
+                created_at=workflow_model.created_at,
+                updated_at=workflow_model.updated_at,
+                created_by=workflow_model.created_by,
+                version=workflow_model.version,
+            )
+
+        @self.app.get("/api/workflows/{workflow_id}", response_model=WorkflowResponse)
+        async def get_workflow(
+            workflow_id: str,
+            user: User = Depends(require_permission("read:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Get a specific workflow"""
+            repo = WorkflowRepository(session)
+            workflow = repo.get(workflow_id)
+
+            if not workflow or workflow.tenant_id != tenant.id:
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            return WorkflowResponse(
+                id=workflow.id,
+                name=workflow.name,
+                description=workflow.description,
+                definition=workflow.definition,
+                created_at=workflow.created_at,
+                updated_at=workflow.updated_at,
+                created_by=workflow.created_by,
+                version=workflow.version,
+            )
+
+        @self.app.put("/api/workflows/{workflow_id}", response_model=WorkflowResponse)
+        async def update_workflow(
+            workflow_id: str,
+            update: WorkflowUpdate,
+            user: User = Depends(require_permission("write:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Update an existing workflow"""
+            repo = WorkflowRepository(session)
+            workflow = repo.get(workflow_id)
+
+            if not workflow or workflow.tenant_id != tenant.id:
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            # Prepare updates
+            updates = {}
+            if update.name is not None:
+                updates["name"] = update.name
+            if update.description is not None:
+                updates["description"] = update.description
+            if update.definition is not None:
+                updates["definition"] = update.definition
+
+            workflow = repo.update(workflow_id, updates, updated_by=user.email)
+
+            return WorkflowResponse(
+                id=workflow.id,
+                name=workflow.name,
+                description=workflow.description,
+                definition=workflow.definition,
+                created_at=workflow.created_at,
+                updated_at=workflow.updated_at,
+                created_by=workflow.created_by,
+                version=workflow.version,
+            )
+
+        @self.app.delete("/api/workflows/{workflow_id}")
+        async def delete_workflow(
+            workflow_id: str,
+            user: User = Depends(require_permission("delete:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Delete a workflow"""
+            repo = WorkflowRepository(session)
+            workflow = repo.get(workflow_id)
+
+            if not workflow or workflow.tenant_id != tenant.id:
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            repo.delete(workflow_id)
+
+            # Update tenant workflow count
+            tenant.max_workflows["current"] -= 1
+            session.commit()
+
+            return {"message": "Workflow deleted successfully"}
+
+        # Workflow execution endpoints
+        @self.app.post(
+            "/api/workflows/{workflow_id}/execute", response_model=ExecutionResponse
+        )
+        async def execute_workflow(
+            workflow_id: str,
+            request: ExecutionRequest,
+            user: User = Depends(require_permission("execute:workflows")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Execute a workflow"""
+            # Check execution limits
+            if (
+                tenant.max_executions_per_month["current"]
+                >= tenant.max_executions_per_month["limit"]
+            ):
+                raise HTTPException(
+                    status_code=403, detail="Monthly execution limit reached for tenant"
+                )
+
+            # Get workflow
+            workflow_repo = WorkflowRepository(session)
+            workflow_model = workflow_repo.get(workflow_id)
+
+            if not workflow_model or workflow_model.tenant_id != tenant.id:
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            # Create execution record
+            exec_repo = ExecutionRepository(session)
+            execution = exec_repo.create(
+                workflow_id=workflow_id,
+                tenant_id=tenant.id,
+                parameters=request.parameters,
+            )
+
+            # Update tenant execution count
+            tenant.max_executions_per_month["current"] += 1
+            session.commit()
+
+            # Create workflow from definition
+            try:
+                workflow = Workflow.from_dict(workflow_model.definition)
+
+                # Create tenant-isolated runtime
+                runtime = self._create_tenant_runtime(tenant.id)
+
+                # Start execution in background
+                task = asyncio.create_task(
+                    self._execute_workflow_async(
+                        execution.id,
+                        workflow,
+                        runtime,
+                        request.parameters or {},
+                        tenant.id,
+                    )
+                )
+                self.active_executions[execution.id] = task
+
+                return ExecutionResponse(
+                    id=execution.id,
+                    workflow_id=workflow_id,
+                    status=execution.status,
+                    started_at=execution.started_at,
+                    completed_at=execution.completed_at,
+                    result=execution.result,
+                    error=execution.error,
+                )
+
+            except Exception as e:
+                exec_repo.update_status(execution.id, "failed", error=str(e))
+                raise HTTPException(
+                    status_code=500, detail=f"Execution failed: {str(e)}"
+                )
+
+        @self.app.get(
+            "/api/executions/{execution_id}", response_model=ExecutionResponse
+        )
+        async def get_execution(
+            execution_id: str,
+            user: User = Depends(require_permission("read:executions")),
+            tenant: Tenant = Depends(get_current_tenant),
+            session: Session = Depends(get_db_session),
+        ):
+            """Get execution status"""
+            repo = ExecutionRepository(session)
+            execution = repo.get(execution_id)
+
+            if not execution or execution.tenant_id != tenant.id:
+                raise HTTPException(status_code=404, detail="Execution not found")
+
+            return ExecutionResponse(
+                id=execution.id,
+                workflow_id=execution.workflow_id,
+                status=execution.status,
+                started_at=execution.started_at,
+                completed_at=execution.completed_at,
+                result=execution.result,
+                error=execution.error,
+            )
+
+        # WebSocket for real-time updates (with auth)
+        @self.app.websocket("/ws/executions/{execution_id}")
+        async def websocket_execution(
+            websocket: WebSocket, execution_id: str, token: str = Query(...)
+        ):
+            """WebSocket endpoint for real-time execution updates"""
+            # Verify token
+            try:
+                from .auth import JWTAuth
+
+                auth = JWTAuth()
+                token_data = auth.verify_token(token)
+            except Exception:
+                await websocket.close(code=1008, reason="Unauthorized")
+                return
+
+            await websocket.accept()
+
+            # Add to connection pool
+            if execution_id not in self.websocket_connections:
+                self.websocket_connections[execution_id] = []
+            self.websocket_connections[execution_id].append(websocket)
+
+            try:
+                # Keep connection alive and send updates
+                while True:
+                    # Get execution from database
+                    with self.SessionLocal() as session:
+                        repo = ExecutionRepository(session)
+                        execution = repo.get(execution_id)
+
+                        if not execution or execution.tenant_id != token_data.tenant_id:
+                            await websocket.send_json({"error": "Execution not found"})
+                            break
+
+                        # Send current status
+                        await websocket.send_json(
+                            {
+                                "id": execution.id,
+                                "status": execution.status,
+                                "result": execution.result,
+                                "error": execution.error,
+                            }
+                        )
+
+                        # If execution is complete, close connection
+                        if execution.status in ["completed", "failed"]:
+                            break
+
+                    # Wait before next update
+                    await asyncio.sleep(1)
+
+            except WebSocketDisconnect:
+                pass
+            finally:
+                # Remove from connection pool
+                if execution_id in self.websocket_connections:
+                    self.websocket_connections[execution_id].remove(websocket)
+                    if not self.websocket_connections[execution_id]:
+                        del self.websocket_connections[execution_id]
+
+        # API key endpoints
+        @self.app.post("/api/apikeys")
+        async def create_api_key(
+            name: str = Body(...),
+            scopes: List[str] = Body(default=["read:workflows", "execute:workflows"]),
+            user: User = Depends(get_current_user),
+            session: Session = Depends(get_db_session),
+        ):
+            """Create a new API key"""
+            auth_service = AuthService(session)
+            key, api_key_model = auth_service.create_api_key(name, user, scopes)
+
+            return {
+                "id": api_key_model.id,
+                "key": key,  # Only shown once!
+                "name": api_key_model.name,
+                "scopes": api_key_model.scopes,
+                "created_at": api_key_model.created_at,
+            }
+
+        @self.app.get("/api/apikeys")
+        async def list_api_keys(
+            user: User = Depends(get_current_user),
+            session: Session = Depends(get_db_session),
+        ):
+            """List user's API keys"""
+            keys = (
+                session.query(APIKey)
+                .filter(APIKey.user_id == user.id, APIKey.tenant_id == user.tenant_id)
+                .all()
+            )
+
+            return [
+                {
+                    "id": k.id,
+                    "name": k.name,
+                    "scopes": k.scopes,
+                    "is_active": k.is_active,
+                    "last_used_at": k.last_used_at,
+                    "created_at": k.created_at,
+                }
+                for k in keys
+            ]
+
+        @self.app.delete("/api/apikeys/{key_id}")
+        async def delete_api_key(
+            key_id: str,
+            user: User = Depends(get_current_user),
+            session: Session = Depends(get_db_session),
+        ):
+            """Delete an API key"""
+            key = (
+                session.query(APIKey)
+                .filter(APIKey.id == key_id, APIKey.user_id == user.id)
+                .first()
+            )
+
+            if not key:
+                raise HTTPException(status_code=404, detail="API key not found")
+
+            session.delete(key)
+            session.commit()
+
+            return {"message": "API key deleted successfully"}
+
+    def _can_access_node(self, user: User, node_id: str) -> bool:
+        """Check if user can access a specific node"""
+        # Basic nodes available to all
+        basic_nodes = [
+            "csv_reader",
+            "csv_writer",
+            "json_reader",
+            "json_writer",
+            "text_processor",
+            "data_filter",
+            "data_aggregator",
+        ]
+
+        # Advanced nodes require specific permissions or subscription
+        # advanced_nodes = {
+        #     "llm_agent": ["ai_features"],
+        #     "embedding_generator": ["ai_features"],
+        #     "python_code": ["code_execution"],
+        #     "api_client": ["external_apis"],
+        # }
+
+        if node_id in basic_nodes:
+            return True
+
+        # Check subscription tier and features
+        # This would be more sophisticated in production
+        return True  # For now, allow all nodes
+
+    def _create_tenant_runtime(self, tenant_id: str) -> LocalRuntime:
+        """Create a runtime with tenant isolation"""
+        # Create tenant-specific storage path
+        base_path = Path(f"tenants/{tenant_id}/runtime")
+        base_path.mkdir(parents=True, exist_ok=True)
+
+        # Initialize storage backend
+        storage = FileSystemStorage(base_path=str(base_path))
+        task_manager = TaskManager(storage_backend=storage)
+
+        # Create runtime with tenant context
+        runtime = LocalRuntime()
+        runtime.task_manager = task_manager
+
+        return runtime
+
+    async def _execute_workflow_async(
+        self,
+        execution_id: str,
+        workflow: Workflow,
+        runtime: LocalRuntime,
+        parameters: Dict[str, Any],
+        tenant_id: str,
+    ):
+        """Execute workflow asynchronously with tenant isolation"""
+        with self.SessionLocal() as session:
+            exec_repo = ExecutionRepository(session)
+
+            try:
+                # Set tenant context for execution
+                with TenantContext(tenant_id):
+                    # Execute workflow
+                    result, run_id = runtime.execute(workflow, parameters=parameters)
+
+                # Update execution record
+                exec_repo.update_status(execution_id, "completed", result=result)
+
+                # Notify WebSocket clients
+                await self._notify_websocket_clients(
+                    execution_id,
+                    {"id": execution_id, "status": "completed", "result": result},
+                )
+
+            except Exception as e:
+                # Update execution record with error
+                exec_repo.update_status(execution_id, "failed", error=str(e))
+
+                # Notify WebSocket clients
+                await self._notify_websocket_clients(
+                    execution_id,
+                    {"id": execution_id, "status": "failed", "error": str(e)},
+                )
+
+            finally:
+                # Remove from active executions
+                if execution_id in self.active_executions:
+                    del self.active_executions[execution_id]
+
+    async def _notify_websocket_clients(self, execution_id: str, data: Dict[str, Any]):
+        """Notify all WebSocket clients watching this execution"""
+        if execution_id in self.websocket_connections:
+            for websocket in self.websocket_connections[execution_id]:
+                try:
+                    await websocket.send_json(data)
+                except Exception:
+                    pass  # Client disconnected
+
+    def run(self, host: str = "0.0.0.0", port: int = 8000):
+        """Run the API server"""
+        uvicorn.run(self.app, host=host, port=port)
+
+
+def main():
+    """Main entry point for the secure studio API"""
+    import argparse
+
+    parser = argparse.ArgumentParser(description="Kailash Workflow Studio API (Secure)")
+    parser.add_argument("--host", default="0.0.0.0", help="Host to bind to")
+    parser.add_argument("--port", type=int, default=8000, help="Port to bind to")
+
+    args = parser.parse_args()
+
+    # Set up logging
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+    )
+
+    # Create and run API
+    api = WorkflowStudioAPI()
+    api.run(host=args.host, port=args.port)
+
+
+if __name__ == "__main__":
+    main()
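For orientation, here is a minimal sketch of how the API added in this hunk might be exercised. It assumes the module path kailash.api.studio_secure, that UserCreate accepts email, username and password, that TokenResponse exposes an access_token field (both models live in kailash/api/auth.py, which is not part of this hunk), and that a Bearer token is the expected Authorization scheme; httpx is used purely for illustration.

# Minimal sketch (not part of the package): start the secure studio API in a
# background thread, then hit a few of its endpoints over HTTP.
import threading
import time

import httpx  # any HTTP client would do; httpx is an illustrative choice

from kailash.api.studio_secure import WorkflowStudioAPI  # assumed module path

api = WorkflowStudioAPI(db_path="studio.db")  # db_path is optional per __init__
threading.Thread(
    target=api.run, kwargs={"host": "127.0.0.1", "port": 8000}, daemon=True
).start()
time.sleep(2)  # crude wait for uvicorn startup

base = "http://127.0.0.1:8000"
print(httpx.get(f"{base}/health").json())  # {"status": "healthy", "version": "2.0.0"}

# Register a user; the JSON field names are assumptions about UserCreate.
tokens = httpx.post(
    f"{base}/api/auth/register",
    json={"email": "dev@example.com", "username": "dev", "password": "s3cret-pass"},
).json()

# Authenticated call; assumes TokenResponse exposes access_token and Bearer auth.
headers = {"Authorization": f"Bearer {tokens['access_token']}"}
print(httpx.get(f"{base}/api/workflows", headers=headers).json())

The same pattern extends to the other routes shown above (creating and executing workflows, managing API keys, or subscribing to /ws/executions/{execution_id} with a token query parameter), subject to the tenant limits enforced in the handlers.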