kailash 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Files changed (77)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +2 -0
  21. kailash/nodes/ai/a2a.py +714 -67
  22. kailash/nodes/ai/intelligent_agent_orchestrator.py +31 -37
  23. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  24. kailash/nodes/ai/llm_agent.py +324 -1
  25. kailash/nodes/ai/self_organizing.py +5 -6
  26. kailash/nodes/base.py +15 -2
  27. kailash/nodes/base_async.py +45 -0
  28. kailash/nodes/base_cycle_aware.py +374 -0
  29. kailash/nodes/base_with_acl.py +338 -0
  30. kailash/nodes/code/python.py +135 -27
  31. kailash/nodes/data/__init__.py +1 -2
  32. kailash/nodes/data/readers.py +16 -6
  33. kailash/nodes/data/sql.py +699 -256
  34. kailash/nodes/data/writers.py +16 -6
  35. kailash/nodes/logic/__init__.py +8 -0
  36. kailash/nodes/logic/convergence.py +642 -0
  37. kailash/nodes/logic/loop.py +153 -0
  38. kailash/nodes/logic/operations.py +187 -27
  39. kailash/nodes/mixins/__init__.py +11 -0
  40. kailash/nodes/mixins/mcp.py +228 -0
  41. kailash/nodes/mixins.py +387 -0
  42. kailash/runtime/__init__.py +2 -1
  43. kailash/runtime/access_controlled.py +458 -0
  44. kailash/runtime/local.py +106 -33
  45. kailash/runtime/parallel_cyclic.py +529 -0
  46. kailash/sdk_exceptions.py +90 -5
  47. kailash/security.py +845 -0
  48. kailash/tracking/manager.py +38 -15
  49. kailash/tracking/models.py +1 -1
  50. kailash/tracking/storage/filesystem.py +30 -2
  51. kailash/utils/__init__.py +8 -0
  52. kailash/workflow/__init__.py +18 -0
  53. kailash/workflow/convergence.py +270 -0
  54. kailash/workflow/cycle_analyzer.py +889 -0
  55. kailash/workflow/cycle_builder.py +579 -0
  56. kailash/workflow/cycle_config.py +725 -0
  57. kailash/workflow/cycle_debugger.py +860 -0
  58. kailash/workflow/cycle_exceptions.py +615 -0
  59. kailash/workflow/cycle_profiler.py +741 -0
  60. kailash/workflow/cycle_state.py +338 -0
  61. kailash/workflow/cyclic_runner.py +985 -0
  62. kailash/workflow/graph.py +500 -39
  63. kailash/workflow/migration.py +809 -0
  64. kailash/workflow/safety.py +365 -0
  65. kailash/workflow/templates.py +763 -0
  66. kailash/workflow/validation.py +751 -0
  67. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/METADATA +259 -12
  68. kailash-0.2.1.dist-info/RECORD +125 -0
  69. kailash/nodes/mcp/__init__.py +0 -11
  70. kailash/nodes/mcp/client.py +0 -554
  71. kailash/nodes/mcp/resource.py +0 -682
  72. kailash/nodes/mcp/server.py +0 -577
  73. kailash-0.1.5.dist-info/RECORD +0 -88
  74. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/WHEEL +0 -0
  75. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/entry_points.txt +0 -0
  76. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/licenses/LICENSE +0 -0
  77. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/top_level.txt +0 -0
kailash/api/studio.py ADDED
@@ -0,0 +1,915 @@
+"""
+Kailash Workflow Studio API
+
+This module provides REST API endpoints for the Workflow Studio frontend,
+enabling visual workflow creation and management.
+
+The API is designed to be multi-tenant aware with proper isolation between
+different tenants' workflows and data.
+"""
+
+import asyncio
+import json
+import logging
+import os
+import uuid
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+import uvicorn
+from fastapi import FastAPI, HTTPException, Query, WebSocket, WebSocketDisconnect
+from fastapi.middleware.cors import CORSMiddleware
+from pydantic import BaseModel, Field
+
+from kailash.nodes.base import NodeRegistry
+from kailash.runtime.local import LocalRuntime
+from kailash.tracking.manager import TaskManager
+from kailash.tracking.storage.filesystem import FileSystemStorage
+from kailash.utils.export import export_workflow
+from kailash.workflow import Workflow
+
+from .custom_nodes import setup_custom_node_routes
+from .database import (
+    CustomNodeRepository,
+    ExecutionRepository,
+    WorkflowRepository,
+    get_db_session,
+    init_database,
+)
+
+logger = logging.getLogger(__name__)
+
+
+# Pydantic models for API
+class NodeDefinition(BaseModel):
+    """Node definition for frontend consumption"""
+
+    id: str
+    category: str
+    name: str
+    description: str
+    parameters: List[Dict[str, Any]]
+    inputs: List[Dict[str, Any]]
+    outputs: List[Dict[str, Any]]
+
+
+class WorkflowCreate(BaseModel):
+    """Workflow creation request"""
+
+    name: str
+    description: Optional[str] = None
+    definition: Dict[str, Any]
+
+
+class WorkflowUpdate(BaseModel):
+    """Workflow update request"""
+
+    name: Optional[str] = None
+    description: Optional[str] = None
+    definition: Optional[Dict[str, Any]] = None
+
+
+class WorkflowResponse(BaseModel):
+    """Workflow response model"""
+
+    id: str
+    name: str
+    description: Optional[str]
+    definition: Dict[str, Any]
+    created_at: datetime
+    updated_at: datetime
+
+
+class ExecutionRequest(BaseModel):
+    """Workflow execution request"""
+
+    parameters: Optional[Dict[str, Any]] = None
+
+
+class ExecutionResponse(BaseModel):
+    """Workflow execution response"""
+
+    id: str
+    workflow_id: str
+    status: str
+    started_at: datetime
+    completed_at: Optional[datetime]
+    result: Optional[Dict[str, Any]]
+    error: Optional[str]
+
+
+class WorkflowImportRequest(BaseModel):
+    """Workflow import request"""
+
+    name: str
+    description: Optional[str] = None
+    format: str = Field(..., pattern="^(yaml|json|python)$")
+    content: str
+
+
+class WorkflowImportResponse(BaseModel):
+    """Workflow import response"""
+
+    id: str
+    name: str
+    description: Optional[str]
+    definition: Dict[str, Any]
+    created_at: datetime
+    warnings: List[str] = []
+
+
+class WorkflowStudioAPI:
+    """Main API class for Workflow Studio"""
+
+    def __init__(self, tenant_id: str = "default", db_path: str = None):
+        self.tenant_id = tenant_id
+        self.app = FastAPI(title="Kailash Workflow Studio API", version="1.0.0")
+
+        # Initialize database
+        self.SessionLocal, self.engine = init_database(db_path)
+
+        # Initialize repositories
+        self.setup_repositories()
+
+        self.setup_middleware()
+        self.setup_routes()
+        self.setup_storage()
+        self.active_executions: Dict[str, asyncio.Task] = {}
+        self.websocket_connections: Dict[str, List[WebSocket]] = {}
+
+        # Register custom nodes on startup
+        self.app.add_event_handler("startup", self._register_custom_nodes)
+        # Ensure built-in nodes are loaded on startup
+        self.app.add_event_handler("startup", self._ensure_nodes_loaded)
+
+    def setup_repositories(self):
+        """Initialize database repositories"""
+        with get_db_session(self.SessionLocal) as session:
+            self.workflow_repo = WorkflowRepository(session)
+            self.node_repo = CustomNodeRepository(session)
+            self.execution_repo = ExecutionRepository(session)
+
+    async def _register_custom_nodes(self):
+        """Register custom nodes from database into NodeRegistry"""
+        try:
+            with get_db_session(self.SessionLocal) as session:
+                node_repo = CustomNodeRepository(session)
+                custom_nodes = node_repo.list(self.tenant_id)
+
+                for node in custom_nodes:
+                    # Register node in NodeRegistry
+                    # This would require dynamic node creation based on stored definition
+                    logger.info(f"Registered custom node: {node.name}")
+        except Exception as e:
+            logger.error(f"Error registering custom nodes: {e}")
+
+    async def _ensure_nodes_loaded(self):
+        """Ensure all built-in nodes are loaded into NodeRegistry"""
+        try:
+            # Import all node modules to trigger registration
+
+            # Force import of all submodules to trigger @register_node decorators
+
+            # Log the number of registered nodes
+            registry = NodeRegistry.list_nodes()
+            logger.info(f"Loaded {len(registry)} nodes into NodeRegistry")
+
+            # Log categories
+            categories = set()
+            for node_id, node_class in registry.items():
+                module_parts = node_class.__module__.split(".")
+                if "nodes" in module_parts:
+                    idx = module_parts.index("nodes")
+                    if idx + 1 < len(module_parts):
+                        categories.add(module_parts[idx + 1])
+
+            logger.info(f"Node categories: {', '.join(sorted(categories))}")
+        except Exception as e:
+            logger.error(f"Error loading nodes: {e}")
+            import traceback
+
+            logger.error(traceback.format_exc())
+
+    def setup_middleware(self):
+        """Configure CORS and other middleware"""
+        origins = os.getenv("CORS_ORIGINS", "http://localhost:3000").split(",")
+
+        self.app.add_middleware(
+            CORSMiddleware,
+            allow_origins=origins,
+            allow_credentials=True,
+            allow_methods=["*"],
+            allow_headers=["*"],
+        )
+
+    def setup_storage(self):
+        """Initialize storage for workflows and executions"""
+        base_path = Path(f"tenants/{self.tenant_id}")
+        base_path.mkdir(parents=True, exist_ok=True)
+
+        self.workflows_path = base_path / "workflows"
+        self.workflows_path.mkdir(exist_ok=True)
+
+        self.executions_path = base_path / "executions"
+        self.executions_path.mkdir(exist_ok=True)
+
+        # Initialize task manager for execution tracking
+        storage = FileSystemStorage(base_path=str(base_path / "tracking"))
+        self.task_manager = TaskManager(storage_backend=storage)
+
+    def setup_routes(self):
+        """Configure API routes"""
+
+        # Setup custom node routes
+
+        setup_custom_node_routes(self.app, self.SessionLocal, self.tenant_id)
+
+        @self.app.get("/health")
+        async def health_check():
+            """Health check endpoint"""
+            return {"status": "healthy", "tenant_id": self.tenant_id}
+
+        # Node discovery endpoints
+        @self.app.get("/api/nodes", response_model=Dict[str, List[NodeDefinition]])
+        async def list_nodes():
+            """List all available nodes grouped by category"""
+            try:
+                registry = NodeRegistry.list_nodes()
+                nodes_by_category = {}
+
+                # Log registry contents for debugging
+                logger.info(f"NodeRegistry contains {len(registry)} nodes")
+
+                if not registry:
+                    logger.warning("NodeRegistry is empty - no nodes registered")
+                    return {}
+
+                for node_id, node_class in registry.items():
+                    # Extract category from module path
+                    module_parts = node_class.__module__.split(".")
+                    if "nodes" in module_parts:
+                        idx = module_parts.index("nodes")
+                        if idx + 1 < len(module_parts):
+                            category = module_parts[idx + 1]
+                        else:
+                            category = "misc"
+                    else:
+                        category = "misc"
+
+                    # Get node parameters
+                    try:
+                        # Create a temporary instance to get parameters
+                        # Most nodes should work with empty config
+                        temp_node = node_class()
+                        params = temp_node.get_parameters()
+                        param_list = [
+                            {
+                                "name": name,
+                                "type": str(
+                                    param.type.__name__
+                                    if hasattr(param.type, "__name__")
+                                    else str(param.type)
+                                ),
+                                "required": param.required,
+                                "description": param.description,
+                                "default": param.default,
+                            }
+                            for name, param in params.items()
+                        ]
+                    except Exception as e:
+                        logger.error(
+                            f"Error getting parameters for node {node_id}: {e}"
+                        )
+                        param_list = []
+
+                    # Extract input/output information
+                    inputs = []
+                    outputs = []
+
+                    # Check if node has explicit input schema
+                    if hasattr(node_class, "get_input_schema"):
+                        try:
+                            input_schema = node_class.get_input_schema()
+                            if isinstance(input_schema, dict):
+                                for key, schema in input_schema.items():
+                                    inputs.append(
+                                        {
+                                            "name": key,
+                                            "type": schema.get("type", "any"),
+                                            "required": schema.get("required", True),
+                                        }
+                                    )
+                        except Exception:
+                            pass
+
+                    # If no explicit schema, infer from parameters
+                    if not inputs:
+                        # Check if any parameters are marked as input sources
+                        try:
+                            if "params" in locals():
+                                for param_name, param in params.items():
+                                    if (
+                                        hasattr(param, "source")
+                                        and param.source == "input"
+                                    ):
+                                        inputs.append(
+                                            {
+                                                "name": param_name,
+                                                "type": str(
+                                                    param.type.__name__
+                                                    if hasattr(param.type, "__name__")
+                                                    else "any"
+                                                ),
+                                                "required": param.required,
+                                            }
+                                        )
+                        except Exception:
+                            pass
+
+                    # If still no inputs and node typically processes data, add default
+                    if not inputs and any(
+                        keyword in node_class.__name__.lower()
+                        for keyword in ["process", "transform", "filter", "merge"]
+                    ):
+                        inputs.append(
+                            {"name": "data", "type": "any", "required": True}
+                        )
+
+                    # Extract output information
+                    if hasattr(node_class, "get_output_schema"):
+                        try:
+                            output_schema = node_class.get_output_schema()
+                            outputs.append(
+                                {
+                                    "name": "output",
+                                    "type": (
+                                        "object"
+                                        if isinstance(output_schema, dict)
+                                        else "any"
+                                    ),
+                                    "schema": (
+                                        output_schema
+                                        if isinstance(output_schema, dict)
+                                        else None
+                                    ),
+                                }
+                            )
+                        except Exception:
+                            outputs.append({"name": "output", "type": "any"})
+                    else:
+                        # Default output for all nodes
+                        outputs.append({"name": "output", "type": "any"})
+
+                    # Create node definition
+                    node_def = NodeDefinition(
+                        id=node_id,
+                        category=category,
+                        name=node_class.__name__,
+                        description=node_class.__doc__ or "No description available",
+                        parameters=param_list,
+                        inputs=inputs,
+                        outputs=outputs,
+                    )
+
+                    if category not in nodes_by_category:
+                        nodes_by_category[category] = []
+                    nodes_by_category[category].append(node_def)
+
+                return nodes_by_category
+            except Exception as e:
+                logger.error(f"Error in list_nodes endpoint: {e}")
+                raise HTTPException(
+                    status_code=500, detail=f"Internal server error: {str(e)}"
+                )
+
+        # Add alias for backward compatibility
+        @self.app.get(
+            "/api/nodes/discover", response_model=Dict[str, List[NodeDefinition]]
+        )
+        async def discover_nodes():
+            """Alias for list_nodes endpoint for backward compatibility"""
+            return await list_nodes()
+
+        @self.app.get("/api/nodes/{category}")
+        async def list_nodes_by_category(category: str):
+            """List nodes in a specific category"""
+            all_nodes = await list_nodes()
+            if category not in all_nodes:
+                raise HTTPException(
+                    status_code=404, detail=f"Category '{category}' not found"
+                )
+            return all_nodes[category]
+
+        @self.app.get("/api/nodes/{category}/{node_id}")
+        async def get_node_details(category: str, node_id: str):
+            """Get detailed information about a specific node"""
+            all_nodes = await list_nodes()
+            if category not in all_nodes:
+                raise HTTPException(
+                    status_code=404, detail=f"Category '{category}' not found"
+                )
+
+            for node in all_nodes[category]:
+                if node.id == node_id:
+                    return node
+
+            raise HTTPException(
+                status_code=404,
+                detail=f"Node '{node_id}' not found in category '{category}'",
+            )
+
+        # Workflow management endpoints
+        @self.app.get("/api/workflows", response_model=List[WorkflowResponse])
+        async def list_workflows(
+            limit: int = Query(100, ge=1, le=1000), offset: int = Query(0, ge=0)
+        ):
+            """List all workflows for the tenant"""
+            workflows = []
+            workflow_files = sorted(
+                self.workflows_path.glob("*.json"),
+                key=lambda p: p.stat().st_mtime,
+                reverse=True,
+            )
+
+            for workflow_file in workflow_files[offset : offset + limit]:
+                try:
+                    with open(workflow_file, "r") as f:
+                        data = json.load(f)
+                    workflows.append(WorkflowResponse(**data))
+                except Exception as e:
+                    logger.error(f"Error loading workflow {workflow_file}: {e}")
+
+            return workflows
+
+        @self.app.post("/api/workflows", response_model=WorkflowResponse)
+        async def create_workflow(workflow: WorkflowCreate):
+            """Create a new workflow"""
+            workflow_id = str(uuid.uuid4())
+            now = datetime.now(timezone.utc)
+
+            workflow_data = {
+                "id": workflow_id,
+                "name": workflow.name,
+                "description": workflow.description,
+                "definition": workflow.definition,
+                "created_at": now.isoformat(),
+                "updated_at": now.isoformat(),
+            }
+
+            # Save workflow
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            with open(workflow_file, "w") as f:
+                json.dump(workflow_data, f, indent=2)
+
+            return WorkflowResponse(**workflow_data)
+
+        @self.app.get("/api/workflows/{workflow_id}", response_model=WorkflowResponse)
+        async def get_workflow(workflow_id: str):
+            """Get a specific workflow"""
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            if not workflow_file.exists():
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            with open(workflow_file, "r") as f:
+                data = json.load(f)
+
+            return WorkflowResponse(**data)
+
+        @self.app.put("/api/workflows/{workflow_id}", response_model=WorkflowResponse)
+        async def update_workflow(workflow_id: str, update: WorkflowUpdate):
+            """Update an existing workflow"""
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            if not workflow_file.exists():
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            # Load existing workflow
+            with open(workflow_file, "r") as f:
+                data = json.load(f)
+
+            # Update fields
+            if update.name is not None:
+                data["name"] = update.name
+            if update.description is not None:
+                data["description"] = update.description
+            if update.definition is not None:
+                data["definition"] = update.definition
+
+            data["updated_at"] = datetime.now(timezone.utc).isoformat()
+
+            # Save updated workflow
+            with open(workflow_file, "w") as f:
+                json.dump(data, f, indent=2)
+
+            return WorkflowResponse(**data)
+
+        @self.app.delete("/api/workflows/{workflow_id}")
+        async def delete_workflow(workflow_id: str):
+            """Delete a workflow"""
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            if not workflow_file.exists():
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            workflow_file.unlink()
+            return {"message": "Workflow deleted successfully"}
+
+        # Workflow execution endpoints
+        @self.app.post(
+            "/api/workflows/{workflow_id}/execute", response_model=ExecutionResponse
+        )
+        async def execute_workflow(workflow_id: str, request: ExecutionRequest):
+            """Execute a workflow"""
+            # Load workflow
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            if not workflow_file.exists():
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            with open(workflow_file, "r") as f:
+                workflow_data = json.load(f)
+
+            # Create execution record
+            execution_id = str(uuid.uuid4())
+            execution_data = {
+                "id": execution_id,
+                "workflow_id": workflow_id,
+                "status": "running",
+                "started_at": datetime.now(timezone.utc).isoformat(),
+                "completed_at": None,
+                "result": None,
+                "error": None,
+            }
+
+            # Save initial execution state
+            execution_file = self.executions_path / f"{execution_id}.json"
+            with open(execution_file, "w") as f:
+                json.dump(execution_data, f, indent=2)
+
+            # Create workflow from definition
+            try:
+                workflow = Workflow.from_dict(workflow_data["definition"])
+                runtime = LocalRuntime()
+
+                # Start execution in background
+                task = asyncio.create_task(
+                    self._execute_workflow_async(
+                        execution_id, workflow, runtime, request.parameters or {}
+                    )
+                )
+                self.active_executions[execution_id] = task
+
+            except Exception as e:
+                execution_data["status"] = "failed"
+                execution_data["error"] = str(e)
+                execution_data["completed_at"] = datetime.now(timezone.utc).isoformat()
+
+                with open(execution_file, "w") as f:
+                    json.dump(execution_data, f, indent=2)
+
+            return ExecutionResponse(**execution_data)
+
+        @self.app.get(
+            "/api/executions/{execution_id}", response_model=ExecutionResponse
+        )
+        async def get_execution(execution_id: str):
+            """Get execution status"""
+            execution_file = self.executions_path / f"{execution_id}.json"
+            if not execution_file.exists():
+                raise HTTPException(status_code=404, detail="Execution not found")
+
+            with open(execution_file, "r") as f:
+                data = json.load(f)
+
+            return ExecutionResponse(**data)
+
+        # WebSocket for real-time updates
+        @self.app.websocket("/ws/executions/{execution_id}")
+        async def websocket_execution(websocket: WebSocket, execution_id: str):
+            """WebSocket endpoint for real-time execution updates"""
+            await websocket.accept()
+
+            # Add to connection pool
+            if execution_id not in self.websocket_connections:
+                self.websocket_connections[execution_id] = []
+            self.websocket_connections[execution_id].append(websocket)
+
+            try:
+                # Keep connection alive
+                while True:
+                    # Check if execution exists
+                    execution_file = self.executions_path / f"{execution_id}.json"
+                    if not execution_file.exists():
+                        await websocket.send_json({"error": "Execution not found"})
+                        break
+
+                    # Send current status
+                    with open(execution_file, "r") as f:
+                        data = json.load(f)
+                    await websocket.send_json(data)
+
+                    # If execution is complete, close connection
+                    if data["status"] in ["completed", "failed"]:
+                        break
+
+                    # Wait before next update
+                    await asyncio.sleep(1)
+
+            except WebSocketDisconnect:
+                pass
+            finally:
+                # Remove from connection pool
+                if execution_id in self.websocket_connections:
+                    self.websocket_connections[execution_id].remove(websocket)
+                    if not self.websocket_connections[execution_id]:
+                        del self.websocket_connections[execution_id]
+
+        # Export endpoints
+        @self.app.get("/api/workflows/{workflow_id}/export")
+        async def export_workflow_endpoint(
+            workflow_id: str, format: str = Query("python", regex="^(python|yaml)$")
+        ):
+            """Export workflow as Python code or YAML"""
+            # Load workflow
+            workflow_file = self.workflows_path / f"{workflow_id}.json"
+            if not workflow_file.exists():
+                raise HTTPException(status_code=404, detail="Workflow not found")
+
+            with open(workflow_file, "r") as f:
+                workflow_data = json.load(f)
+
+            # Create workflow from definition
+            try:
+                workflow = Workflow.from_dict(workflow_data["definition"])
+
+                if format == "python":
+                    # For Python export, we'll generate code manually
+                    # since the SDK doesn't have a to_python method
+                    code = self._generate_python_code(workflow, workflow_data["name"])
+                    return {"format": "python", "content": code}
+                else:  # yaml
+                    yaml_content = export_workflow(workflow, format="yaml")
+                    return {"format": "yaml", "content": yaml_content}
+
+            except Exception as e:
+                raise HTTPException(status_code=500, detail=f"Export failed: {str(e)}")
+
+        # Import endpoints
+        @self.app.post("/api/workflows/import", response_model=WorkflowImportResponse)
+        async def import_workflow(request: WorkflowImportRequest):
+            """Import workflow from Python code, YAML, or JSON"""
+
+            import yaml
+
+            workflow_id = str(uuid.uuid4())
+            warnings = []
+
+            try:
+                # Parse content based on format
+                if request.format == "json":
+                    definition = json.loads(request.content)
+                elif request.format == "yaml":
+                    definition = yaml.safe_load(request.content)
+                elif request.format == "python":
+                    # Parse Python code to extract workflow definition
+                    definition = self._parse_python_workflow(request.content)
+                    warnings.append(
+                        "Python import is experimental. Manual adjustments may be needed."
+                    )
+                else:
+                    raise ValueError(f"Unsupported format: {request.format}")
+
+                # Validate the workflow definition
+                try:
+                    workflow = Workflow.from_dict(definition)
+                    # Convert back to dict to ensure it's valid
+                    definition = workflow.to_dict()
+                except Exception as e:
+                    warnings.append(f"Workflow validation warning: {str(e)}")
+
+                # Create workflow record
+                now = datetime.now(timezone.utc)
+                workflow_data = {
+                    "id": workflow_id,
+                    "name": request.name,
+                    "description": request.description,
+                    "definition": definition,
+                    "created_at": now.isoformat(),
+                    "updated_at": now.isoformat(),
+                }
+
+                # Save workflow
+                workflow_file = self.workflows_path / f"{workflow_id}.json"
+                with open(workflow_file, "w") as f:
+                    json.dump(workflow_data, f, indent=2)
+
+                return WorkflowImportResponse(
+                    id=workflow_id,
+                    name=request.name,
+                    description=request.description,
+                    definition=definition,
+                    created_at=now,
+                    warnings=warnings,
+                )
+
+            except Exception as e:
+                raise HTTPException(status_code=400, detail=f"Import failed: {str(e)}")
+
+    async def _execute_workflow_async(
+        self,
+        execution_id: str,
+        workflow: Workflow,
+        runtime: LocalRuntime,
+        parameters: Dict[str, Any],
+    ):
+        """Execute workflow asynchronously and update status"""
+        execution_file = self.executions_path / f"{execution_id}.json"
+
+        try:
+            # Execute workflow
+            result, run_id = runtime.execute(workflow, parameters=parameters)
+
+            # Update execution record
+            with open(execution_file, "r") as f:
+                execution_data = json.load(f)
+
+            execution_data["status"] = "completed"
+            execution_data["completed_at"] = datetime.now(timezone.utc).isoformat()
+            execution_data["result"] = result
+
+            with open(execution_file, "w") as f:
+                json.dump(execution_data, f, indent=2)
+
+            # Notify WebSocket clients
+            await self._notify_websocket_clients(execution_id, execution_data)
+
+        except Exception as e:
+            # Update execution record with error
+            with open(execution_file, "r") as f:
+                execution_data = json.load(f)
+
+            execution_data["status"] = "failed"
+            execution_data["completed_at"] = datetime.now(timezone.utc).isoformat()
+            execution_data["error"] = str(e)
+
+            with open(execution_file, "w") as f:
+                json.dump(execution_data, f, indent=2)
+
+            # Notify WebSocket clients
+            await self._notify_websocket_clients(execution_id, execution_data)
+
+        finally:
+            # Remove from active executions
+            if execution_id in self.active_executions:
+                del self.active_executions[execution_id]
+
+    async def _notify_websocket_clients(self, execution_id: str, data: Dict[str, Any]):
+        """Notify all WebSocket clients watching this execution"""
+        if execution_id in self.websocket_connections:
+            for websocket in self.websocket_connections[execution_id]:
+                try:
+                    await websocket.send_json(data)
+                except Exception:
+                    pass  # Client disconnected
+
+    def _generate_python_code(self, workflow: Workflow, workflow_name: str) -> str:
+        """Generate Python code from a workflow"""
+        lines = [
+            "#!/usr/bin/env python3",
+            '"""',
+            f"Workflow: {workflow_name}",
+            "Generated by Kailash Workflow Studio",
+            '"""',
+            "",
+            "from kailash.workflow import Workflow",
+            "from kailash.runtime.local import LocalRuntime",
+            "",
+        ]
+
+        # Import node classes
+        node_imports = set()
+        for node_id in workflow.graph.nodes:
+            node_data = workflow.graph.nodes[node_id]
+            if "node" in node_data:
+                node = node_data["node"]
+                module = node.__class__.__module__
+                class_name = node.__class__.__name__
+                node_imports.add(f"from {module} import {class_name}")
+
+        lines.extend(sorted(node_imports))
+        lines.extend(
+            ["", "", "def main():", f'    """Execute {workflow_name} workflow."""']
+        )
+        lines.append("    # Create workflow")
+        lines.append(
+            f'    workflow = Workflow(workflow_id="{workflow.workflow_id}", name="{workflow.name}")'
+        )
+        lines.append("")
+
+        # Add nodes
+        lines.append("    # Add nodes")
+        for node_id in workflow.graph.nodes:
+            node_data = workflow.graph.nodes[node_id]
+            if "node" in node_data:
+                node = node_data["node"]
+                class_name = node.__class__.__name__
+                config = node.config
+
+                # Format config as Python dict
+                config_str = self._format_config(config)
+                lines.append(f"    {node_id} = {class_name}({config_str})")
+                lines.append(
+                    f'    workflow.add_node(node_id="{node_id}", node_or_type={node_id})'
+                )
+        lines.append("")
+
+        # Add connections
+        if workflow.graph.edges:
+            lines.append("    # Add connections")
+            for edge_data in workflow.graph.edges(data=True):
+                source, target, data = edge_data
+                mapping = data.get("mapping", {})
+                if mapping:
+                    mapping_str = repr(mapping)
+                    lines.append(
+                        f'    workflow.connect(source_node="{source}", target_node="{target}", mapping={mapping_str})'
+                    )
+            lines.append("")
+
+        # Add execution
+        lines.extend(
+            [
+                "    # Execute workflow",
+                "    runtime = LocalRuntime()",
+                "    result, run_id = runtime.execute(workflow)",
+                '    print(f"Workflow completed: {run_id}")',
+                '    print(f"Result: {result}")',
+                "",
+                "",
+                'if __name__ == "__main__":',
+                "    main()",
+            ]
+        )
+
+        return "\n".join(lines)
+
+    def _parse_python_workflow(self, python_code: str) -> Dict[str, Any]:
+        """Parse Python code to extract workflow definition.
+
+        This is a simplified parser that extracts workflow structure from Python code.
+        In production, this would use AST parsing for more robust extraction.
+        """
+        # For now, return a basic workflow structure
+        # This would need to be implemented with proper Python AST parsing
+        return {
+            "nodes": {},
+            "connections": [],
+            "metadata": {
+                "imported_from": "python",
+                "warning": "Python import requires manual verification",
+            },
+        }
+
+    def _format_config(self, config: Dict[str, Any]) -> str:
+        """Format config dict as Python code"""
+        if not config:
+            return ""
+
+        parts = []
+        for key, value in config.items():
+            if isinstance(value, str):
+                parts.append(f'{key}="{value}"')
+            else:
+                parts.append(f"{key}={repr(value)}")
+
+        return ", ".join(parts)
+
+    def run(self, host: str = "0.0.0.0", port: int = 8000):
+        """Run the API server"""
+        uvicorn.run(self.app, host=host, port=port)
+
+
+def main():
+    """Main entry point for the studio API"""
+    import argparse
+
+    parser = argparse.ArgumentParser(description="Kailash Workflow Studio API")
+    parser.add_argument("--host", default="0.0.0.0", help="Host to bind to")
+    parser.add_argument("--port", type=int, default=8000, help="Port to bind to")
+    parser.add_argument(
+        "--tenant-id", default=os.getenv("TENANT_ID", "default"), help="Tenant ID"
+    )
+
+    args = parser.parse_args()
+
+    # Set up logging
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+    )
+
+    # Create and run API
+    api = WorkflowStudioAPI(tenant_id=args.tenant_id)
+    api.run(host=args.host, port=args.port)
+
+
+if __name__ == "__main__":
+    main()
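
For orientation, a minimal sketch of starting the new Studio API shown above; it assumes the package installs with the module layout listed in this diff, and the endpoint paths come from kailash/api/studio.py.

    # Hypothetical quick-start; WorkflowStudioAPI is defined in the added
    # kailash/api/studio.py. Per-tenant storage is created under tenants/<tenant_id>/.
    from kailash.api.studio import WorkflowStudioAPI

    api = WorkflowStudioAPI(tenant_id="default")
    # Serves /health, /api/nodes, /api/workflows, and /ws/executions/{execution_id}
    api.run(host="127.0.0.1", port=8000)

The same server can also be launched from the command line via the module's main() entry point, passing --host, --port, and --tenant-id as shown in the code above.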