kailash 0.1.2__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. {kailash-0.1.2/src/kailash.egg-info → kailash-0.1.3}/PKG-INFO +98 -2
  2. {kailash-0.1.2 → kailash-0.1.3}/README.md +95 -0
  3. {kailash-0.1.2 → kailash-0.1.3}/pyproject.toml +3 -2
  4. kailash-0.1.3/src/kailash/api/__init__.py +7 -0
  5. kailash-0.1.3/src/kailash/api/workflow_api.py +383 -0
  6. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/http.py +0 -4
  7. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/rest.py +1 -1
  8. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/sql.py +3 -3
  9. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/vector_db.py +2 -2
  10. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/logic/__init__.py +2 -1
  11. kailash-0.1.3/src/kailash/nodes/logic/workflow.py +439 -0
  12. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/mcp/resource.py +1 -1
  13. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/mcp/server.py +10 -4
  14. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/transform/processors.py +5 -3
  15. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/docker.py +2 -0
  16. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/models.py +0 -20
  17. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/storage/database.py +4 -4
  18. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/storage/filesystem.py +0 -1
  19. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/visualization/performance.py +7 -7
  20. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/visualization/reports.py +1 -1
  21. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/graph.py +4 -4
  22. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/mock_registry.py +1 -1
  23. {kailash-0.1.2 → kailash-0.1.3/src/kailash.egg-info}/PKG-INFO +98 -2
  24. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/SOURCES.txt +3 -0
  25. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/requires.txt +2 -1
  26. {kailash-0.1.2 → kailash-0.1.3}/LICENSE +0 -0
  27. {kailash-0.1.2 → kailash-0.1.3}/MANIFEST.in +0 -0
  28. {kailash-0.1.2 → kailash-0.1.3}/setup.cfg +0 -0
  29. {kailash-0.1.2 → kailash-0.1.3}/setup.py +0 -0
  30. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/__init__.py +0 -0
  31. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/__main__.py +0 -0
  32. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/cli/__init__.py +0 -0
  33. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/cli/commands.py +0 -0
  34. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/manifest.py +0 -0
  35. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/__init__.py +0 -0
  36. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/__init__.py +0 -0
  37. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/agents.py +0 -0
  38. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/ai_providers.py +0 -0
  39. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/embedding_generator.py +0 -0
  40. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/llm_agent.py +0 -0
  41. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/ai/models.py +0 -0
  42. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/__init__.py +0 -0
  43. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/auth.py +0 -0
  44. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/graphql.py +0 -0
  45. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/api/rate_limiting.py +0 -0
  46. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/base.py +0 -0
  47. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/base_async.py +0 -0
  48. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/code/__init__.py +0 -0
  49. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/code/python.py +0 -0
  50. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/__init__.py +0 -0
  51. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/readers.py +0 -0
  52. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/retrieval.py +0 -0
  53. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/sharepoint_graph.py +0 -0
  54. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/sources.py +0 -0
  55. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/streaming.py +0 -0
  56. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/data/writers.py +0 -0
  57. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/logic/async_operations.py +0 -0
  58. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/logic/operations.py +0 -0
  59. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/mcp/__init__.py +0 -0
  60. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/mcp/client.py +0 -0
  61. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/transform/__init__.py +0 -0
  62. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/transform/chunkers.py +0 -0
  63. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/nodes/transform/formatters.py +0 -0
  64. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/__init__.py +0 -0
  65. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/async_local.py +0 -0
  66. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/local.py +0 -0
  67. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/parallel.py +0 -0
  68. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/runner.py +0 -0
  69. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/runtime/testing.py +0 -0
  70. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/sdk_exceptions.py +0 -0
  71. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/__init__.py +0 -0
  72. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/manager.py +0 -0
  73. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/metrics_collector.py +0 -0
  74. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/storage/__init__.py +0 -0
  75. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/tracking/storage/base.py +0 -0
  76. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/utils/__init__.py +0 -0
  77. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/utils/export.py +0 -0
  78. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/utils/templates.py +0 -0
  79. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/visualization/__init__.py +0 -0
  80. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/visualization/api.py +0 -0
  81. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/visualization/dashboard.py +0 -0
  82. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/__init__.py +0 -0
  83. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/builder.py +0 -0
  84. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/mermaid_visualizer.py +0 -0
  85. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/runner.py +0 -0
  86. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/state.py +0 -0
  87. {kailash-0.1.2 → kailash-0.1.3}/src/kailash/workflow/visualization.py +0 -0
  88. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/dependency_links.txt +0 -0
  89. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/entry_points.txt +0 -0
  90. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/not-zip-safe +0 -0
  91. {kailash-0.1.2 → kailash-0.1.3}/src/kailash.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: kailash
- Version: 0.1.2
+ Version: 0.1.3
  Summary: Python SDK for the Kailash container-node architecture
  Home-page: https://github.com/integrum/kailash-python-sdk
  Author: Integrum
@@ -41,7 +41,8 @@ Requires-Dist: autodoc>=0.5.0
  Requires-Dist: myst-parser>=4.0.1
  Requires-Dist: black>=25.1.0
  Requires-Dist: psutil>=7.0.0
- Requires-Dist: fastapi[all]>=0.115.12
+ Requires-Dist: fastapi>=0.115.12
+ Requires-Dist: uvicorn[standard]>=0.31.0
  Requires-Dist: pytest-asyncio>=1.0.0
  Requires-Dist: pre-commit>=4.2.0
  Requires-Dist: twine>=6.1.0
@@ -89,6 +90,7 @@ Dynamic: requires-python
  - ⚡ **Fast Installation**: Uses `uv` for lightning-fast Python package management
  - 🤖 **AI-Powered**: Complete LLM agents, embeddings, and hierarchical RAG architecture
  - 🧠 **Retrieval-Augmented Generation**: Full RAG pipeline with intelligent document processing
+ - 🌐 **REST API Wrapper**: Expose any workflow as a production-ready API in 3 lines

  ## 🎯 Who Is This For?

@@ -273,6 +275,66 @@ results, run_id = runtime.execute(workflow)
  print("RAG Response:", results["llm_agent"]["response"])
  ```

+ ### Workflow API Wrapper - Expose Workflows as REST APIs
+
+ Transform any Kailash workflow into a production-ready REST API in just 3 lines of code:
+
+ ```python
+ from kailash.api.workflow_api import WorkflowAPI
+
+ # Take any workflow and expose it as an API
+ api = WorkflowAPI(workflow)
+ api.run(port=8000)  # That's it! Your workflow is now a REST API
+ ```
+
+ #### Features
+
+ - **Automatic REST Endpoints**:
+   - `POST /execute` - Execute workflow with inputs
+   - `GET /workflow/info` - Get workflow metadata
+   - `GET /health` - Health check endpoint
+   - Automatic OpenAPI docs at `/docs`
+
+ - **Multiple Execution Modes**:
+   ```bash
+   # Synchronous execution (wait for results)
+   curl -X POST http://localhost:8000/execute \
+     -d '{"inputs": {...}, "mode": "sync"}'
+
+   # Asynchronous execution (get execution ID)
+   curl -X POST http://localhost:8000/execute \
+     -d '{"inputs": {...}, "mode": "async"}'
+
+   # Check async status
+   curl http://localhost:8000/status/{execution_id}
+   ```
+
+ - **Specialized APIs** for specific domains:
+   ```python
+   from kailash.api.workflow_api import create_workflow_api
+
+   # Create a RAG-specific API with custom endpoints
+   api = create_workflow_api(rag_workflow, api_type="rag")
+   # Adds /documents and /query endpoints
+   ```
+
+ - **Production Ready**:
+   ```python
+   # Development
+   api.run(reload=True, log_level="debug")
+
+   # Production with SSL
+   api.run(
+       host="0.0.0.0",
+       port=443,
+       ssl_keyfile="key.pem",
+       ssl_certfile="cert.pem",
+       workers=4
+   )
+   ```
+
+ See the [API demo example](examples/integration_examples/integration_api_demo.py) for complete usage patterns.
+
  ## 📚 Documentation

  | Resource | Description |
@@ -316,6 +378,11 @@ The SDK includes a rich set of pre-built nodes for common operations:
  - `Filter` - Filter records
  - `Aggregator` - Aggregate data

+ **Logic Nodes**
+ - `Switch` - Conditional routing
+ - `Merge` - Combine multiple inputs
+ - `WorkflowNode` - Wrap workflows as reusable nodes
+
  </td>
  </tr>
  <tr>
@@ -384,6 +451,35 @@ workflow.connect("route", "process_valid")
  workflow.connect("route", "handle_errors")
  ```

+ #### Hierarchical Workflow Composition
+ ```python
+ from kailash.workflow import Workflow
+ from kailash.nodes.logic import WorkflowNode
+ from kailash.runtime.local import LocalRuntime
+
+ # Create a reusable data processing workflow
+ inner_workflow = Workflow("data_processor", name="Data Processor")
+ # ... add nodes to inner workflow ...
+
+ # Wrap the workflow as a node
+ processor_node = WorkflowNode(
+     workflow=inner_workflow,
+     name="data_processor"
+ )
+
+ # Use in a larger workflow
+ main_workflow = Workflow("main", name="Main Pipeline")
+ main_workflow.add_node("process", processor_node)
+ main_workflow.add_node("analyze", analyzer_node)
+
+ # Connect workflows
+ main_workflow.connect("process", "analyze")
+
+ # Execute - parameters automatically mapped to inner workflow
+ runtime = LocalRuntime()
+ results, _ = runtime.execute(main_workflow)
+ ```
+
  #### Immutable State Management
  ```python
  from kailash.workflow import Workflow
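
The curl examples in the section above map directly to a small Python client. Here is a minimal sketch, not part of the release: it assumes a server started with `api.run(port=8000)` is reachable locally, and uses only the endpoints and request/response shapes shown in this diff (`POST /execute`, `GET /status/{execution_id}`). The `"query"` input key is purely illustrative; real keys depend on the wrapped workflow's input nodes.

```python
import time

import requests

BASE = "http://localhost:8000"

# Synchronous execution: the body is the WorkflowResponse model
# (outputs, execution_time, workflow_id, version).
resp = requests.post(
    f"{BASE}/execute", json={"inputs": {"query": "hello"}, "mode": "sync"}
)
resp.raise_for_status()
print(resp.json()["outputs"])

# Asynchronous execution: we get back an execution_id and poll /status
# until the cached entry reports completed or failed.
resp = requests.post(
    f"{BASE}/execute", json={"inputs": {"query": "hello"}, "mode": "async"}
)
execution_id = resp.json()["execution_id"]
while True:
    status = requests.get(f"{BASE}/status/{execution_id}").json()
    if status["status"] in ("completed", "failed"):
        break
    time.sleep(0.5)
print(status)
```
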
@@ -30,6 +30,7 @@
  - ⚡ **Fast Installation**: Uses `uv` for lightning-fast Python package management
  - 🤖 **AI-Powered**: Complete LLM agents, embeddings, and hierarchical RAG architecture
  - 🧠 **Retrieval-Augmented Generation**: Full RAG pipeline with intelligent document processing
+ - 🌐 **REST API Wrapper**: Expose any workflow as a production-ready API in 3 lines

  ## 🎯 Who Is This For?

@@ -214,6 +215,66 @@ results, run_id = runtime.execute(workflow)
  print("RAG Response:", results["llm_agent"]["response"])
  ```

+ ### Workflow API Wrapper - Expose Workflows as REST APIs
+
+ Transform any Kailash workflow into a production-ready REST API in just 3 lines of code:
+
+ ```python
+ from kailash.api.workflow_api import WorkflowAPI
+
+ # Take any workflow and expose it as an API
+ api = WorkflowAPI(workflow)
+ api.run(port=8000)  # That's it! Your workflow is now a REST API
+ ```
+
+ #### Features
+
+ - **Automatic REST Endpoints**:
+   - `POST /execute` - Execute workflow with inputs
+   - `GET /workflow/info` - Get workflow metadata
+   - `GET /health` - Health check endpoint
+   - Automatic OpenAPI docs at `/docs`
+
+ - **Multiple Execution Modes**:
+   ```bash
+   # Synchronous execution (wait for results)
+   curl -X POST http://localhost:8000/execute \
+     -d '{"inputs": {...}, "mode": "sync"}'
+
+   # Asynchronous execution (get execution ID)
+   curl -X POST http://localhost:8000/execute \
+     -d '{"inputs": {...}, "mode": "async"}'
+
+   # Check async status
+   curl http://localhost:8000/status/{execution_id}
+   ```
+
+ - **Specialized APIs** for specific domains:
+   ```python
+   from kailash.api.workflow_api import create_workflow_api
+
+   # Create a RAG-specific API with custom endpoints
+   api = create_workflow_api(rag_workflow, api_type="rag")
+   # Adds /documents and /query endpoints
+   ```
+
+ - **Production Ready**:
+   ```python
+   # Development
+   api.run(reload=True, log_level="debug")
+
+   # Production with SSL
+   api.run(
+       host="0.0.0.0",
+       port=443,
+       ssl_keyfile="key.pem",
+       ssl_certfile="cert.pem",
+       workers=4
+   )
+   ```
+
+ See the [API demo example](examples/integration_examples/integration_api_demo.py) for complete usage patterns.
+
  ## 📚 Documentation

  | Resource | Description |
@@ -257,6 +318,11 @@ The SDK includes a rich set of pre-built nodes for common operations:
  - `Filter` - Filter records
  - `Aggregator` - Aggregate data

+ **Logic Nodes**
+ - `Switch` - Conditional routing
+ - `Merge` - Combine multiple inputs
+ - `WorkflowNode` - Wrap workflows as reusable nodes
+
  </td>
  </tr>
  <tr>
@@ -325,6 +391,35 @@ workflow.connect("route", "process_valid")
  workflow.connect("route", "handle_errors")
  ```

+ #### Hierarchical Workflow Composition
+ ```python
+ from kailash.workflow import Workflow
+ from kailash.nodes.logic import WorkflowNode
+ from kailash.runtime.local import LocalRuntime
+
+ # Create a reusable data processing workflow
+ inner_workflow = Workflow("data_processor", name="Data Processor")
+ # ... add nodes to inner workflow ...
+
+ # Wrap the workflow as a node
+ processor_node = WorkflowNode(
+     workflow=inner_workflow,
+     name="data_processor"
+ )
+
+ # Use in a larger workflow
+ main_workflow = Workflow("main", name="Main Pipeline")
+ main_workflow.add_node("process", processor_node)
+ main_workflow.add_node("analyze", analyzer_node)
+
+ # Connect workflows
+ main_workflow.connect("process", "analyze")
+
+ # Execute - parameters automatically mapped to inner workflow
+ runtime = LocalRuntime()
+ results, _ = runtime.execute(main_workflow)
+ ```
+
  #### Immutable State Management
  ```python
  from kailash.workflow import Workflow
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "kailash"
- version = "0.1.2"
+ version = "0.1.3"
  description = "Python SDK for the Kailash container-node architecture"
  authors = [
      {name = "Integrum", email = "info@integrum.com"}
@@ -45,7 +45,8 @@ dependencies = [
      "myst-parser>=4.0.1",
      "black>=25.1.0",
      "psutil>=7.0.0",
-     "fastapi[all]>=0.115.12",
+     "fastapi>=0.115.12",
+     "uvicorn[standard]>=0.31.0",
      "pytest-asyncio>=1.0.0",
      "pre-commit>=4.2.0",
      "twine>=6.1.0",
@@ -0,0 +1,7 @@
+ """
+ Kailash API module for exposing workflows as REST APIs.
+ """
+
+ from .workflow_api import HierarchicalRAGAPI, WorkflowAPI, create_workflow_api
+
+ __all__ = ["WorkflowAPI", "HierarchicalRAGAPI", "create_workflow_api"]
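
With this new `__init__.py`, the wrapper can be imported from the package root rather than the submodule. A one-line check of the re-exports:

```python
# __all__ above re-exports these names at the kailash.api level.
from kailash.api import HierarchicalRAGAPI, WorkflowAPI, create_workflow_api

assert callable(create_workflow_api)
```
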
@@ -0,0 +1,383 @@
+ """
+ Lean API wrapper for Kailash workflows using FastAPI.
+
+ This module provides a general-purpose API wrapper that can expose any Kailash
+ workflow as a REST API with minimal configuration.
+ """
+
+ import asyncio
+ from contextlib import asynccontextmanager
+ from enum import Enum
+ from typing import Any, Dict, List, Optional, Union
+
+ import uvicorn
+ from fastapi import BackgroundTasks, FastAPI, HTTPException
+ from fastapi.responses import StreamingResponse
+ from pydantic import BaseModel, Field
+
+ from kailash.runtime.local import LocalRuntime
+ from kailash.workflow.builder import WorkflowBuilder
+ from kailash.workflow.graph import Workflow
+
+
+ class ExecutionMode(str, Enum):
+     """Execution modes for workflow API."""
+
+     SYNC = "sync"
+     ASYNC = "async"
+     STREAM = "stream"
+
+
+ class WorkflowRequest(BaseModel):
+     """Base request model for workflow execution."""
+
+     inputs: Dict[str, Any] = Field(..., description="Input data for workflow nodes")
+     config: Optional[Dict[str, Any]] = Field(
+         None, description="Node configuration overrides"
+     )
+     mode: ExecutionMode = Field(ExecutionMode.SYNC, description="Execution mode")
+
+
+ class WorkflowResponse(BaseModel):
+     """Base response model for workflow execution."""
+
+     outputs: Dict[str, Any] = Field(..., description="Output data from workflow nodes")
+     execution_time: float = Field(..., description="Execution time in seconds")
+     workflow_id: str = Field(..., description="Workflow identifier")
+     version: str = Field(..., description="Workflow version")
+
+
+ class WorkflowAPI:
+     """
+     Lean API wrapper for Kailash workflows.
+
+     This class provides a minimal, efficient way to expose any Kailash workflow
+     as a REST API with support for synchronous, asynchronous, and streaming execution.
+
+     Example:
+         >>> # For any workflow
+         >>> from my_workflows import rag_workflow
+         >>> api = WorkflowAPI(rag_workflow)
+         >>> api.run(port=8000)
+     """
+
+     def __init__(
+         self,
+         workflow: Union[WorkflowBuilder, Workflow],
+         app_name: str = "Kailash Workflow API",
+         version: str = "1.0.0",
+         description: str = "API wrapper for Kailash workflow execution",
+     ):
+         """
+         Initialize the API wrapper.
+
+         Args:
+             workflow: The WorkflowBuilder or Workflow instance to expose
+             app_name: Name of the API application
+             version: API version
+             description: API description
+         """
+         if isinstance(workflow, WorkflowBuilder):
+             self.workflow = workflow
+             self.workflow_graph = workflow.build()
+             self.workflow_id = getattr(workflow, "workflow_id", "unnamed")
+             self.version = getattr(workflow, "version", "1.0.0")
+         else:  # Workflow instance
+             self.workflow = workflow
+             self.workflow_graph = workflow
+             self.workflow_id = workflow.workflow_id
+             self.version = workflow.version
+
+         self.runtime = LocalRuntime()
+
+         # Create FastAPI app with lifespan management
+         self.app = FastAPI(
+             title=app_name,
+             version=version,
+             description=description,
+             lifespan=self._lifespan,
+         )
+
+         # Setup routes
+         self._setup_routes()
+
+         # Cache for async executions
+         self._execution_cache: Dict[str, Dict[str, Any]] = {}
+
+     @asynccontextmanager
+     async def _lifespan(self, app: FastAPI):
+         """Manage app lifecycle."""
+         # Startup
+         yield
+         # Shutdown - cleanup cache
+         self._execution_cache.clear()
+
+     def _setup_routes(self):
+         """Setup API routes dynamically based on workflow."""
+
+         # Main execution endpoint
+         @self.app.post("/execute", response_model=WorkflowResponse)
+         async def execute_workflow(
+             request: WorkflowRequest, background_tasks: BackgroundTasks
+         ):
+             """Execute the workflow with provided inputs."""
+
+             if request.mode == ExecutionMode.SYNC:
+                 return await self._execute_sync(request)
+             elif request.mode == ExecutionMode.ASYNC:
+                 return await self._execute_async(request, background_tasks)
+             else:  # STREAM
+                 return StreamingResponse(
+                     self._execute_stream(request), media_type="application/json"
+                 )
+
+         # Status endpoint for async executions
+         @self.app.get("/status/{execution_id}")
+         async def get_execution_status(execution_id: str):
+             """Get status of async execution."""
+             if execution_id not in self._execution_cache:
+                 raise HTTPException(status_code=404, detail="Execution not found")
+             return self._execution_cache[execution_id]
+
+         # Workflow metadata endpoint
+         @self.app.get("/workflow/info")
+         async def get_workflow_info():
+             """Get workflow metadata and structure."""
+             graph_data = self.workflow_graph
+             return {
+                 "id": self.workflow_id,
+                 "version": self.version,
+                 "nodes": list(graph_data.nodes()),
+                 "edges": list(graph_data.edges()),
+                 "input_nodes": [
+                     n for n in graph_data.nodes() if graph_data.in_degree(n) == 0
+                 ],
+                 "output_nodes": [
+                     n for n in graph_data.nodes() if graph_data.out_degree(n) == 0
+                 ],
+             }
+
+         # Health check
+         @self.app.get("/health")
+         async def health_check():
+             """Check API health."""
+             return {"status": "healthy", "workflow": self.workflow_id}
+
+     async def _execute_sync(self, request: WorkflowRequest) -> WorkflowResponse:
+         """Execute workflow synchronously."""
+         import time
+
+         start_time = time.time()
+
+         try:
+             # Apply configuration overrides if provided
+             if request.config:
+                 for node_id, config in request.config.items():
+                     # This would need workflow builder enhancement to support
+                     # dynamic config updates
+                     pass
+
+             # Execute workflow with inputs
+             results = await asyncio.to_thread(
+                 self.runtime.execute, self.workflow_graph, request.inputs
+             )
+
+             # Handle tuple return from runtime
+             if isinstance(results, tuple):
+                 results = results[0] if results else {}
+
+             execution_time = time.time() - start_time
+
+             return WorkflowResponse(
+                 outputs=results,
+                 execution_time=execution_time,
+                 workflow_id=self.workflow_id,
+                 version=self.version,
+             )
+
+         except Exception as e:
+             raise HTTPException(status_code=500, detail=str(e))
+
+     async def _execute_async(
+         self, request: WorkflowRequest, background_tasks: BackgroundTasks
+     ):
+         """Execute workflow asynchronously."""
+         import uuid
+
+         execution_id = str(uuid.uuid4())
+
+         # Initialize cache entry
+         self._execution_cache[execution_id] = {
+             "status": "pending",
+             "workflow_id": self.workflow_id,
+             "version": self.version,
+         }
+
+         # Schedule background execution
+         background_tasks.add_task(self._run_async_execution, execution_id, request)
+
+         return {
+             "execution_id": execution_id,
+             "status": "pending",
+             "message": f"Execution started. Check status at /status/{execution_id}",
+         }
+
+     async def _run_async_execution(self, execution_id: str, request: WorkflowRequest):
+         """Run async execution in background."""
+         try:
+             self._execution_cache[execution_id]["status"] = "running"
+
+             result = await self._execute_sync(request)
+
+             self._execution_cache[execution_id].update(
+                 {"status": "completed", "result": result.dict()}
+             )
+
+         except Exception as e:
+             self._execution_cache[execution_id].update(
+                 {"status": "failed", "error": str(e)}
+             )
+
+     async def _execute_stream(self, request: WorkflowRequest):
+         """Execute workflow with streaming response."""
+         import json
+         import time
+
+         try:
+             # For streaming, we'd need workflow runner enhancement
+             # to support progress callbacks. For now, simulate with
+             # start/end events
+
+             yield json.dumps(
+                 {
+                     "event": "start",
+                     "workflow_id": self.workflow_id,
+                     "timestamp": time.time(),
+                 }
+             ) + "\n"
+
+             result = await self._execute_sync(request)
+
+             yield json.dumps(
+                 {"event": "complete", "result": result.dict(), "timestamp": time.time()}
+             ) + "\n"
+
+         except Exception as e:
+             yield json.dumps(
+                 {"event": "error", "error": str(e), "timestamp": time.time()}
+             ) + "\n"
+
+     def run(self, host: str = "0.0.0.0", port: int = 8000, **kwargs):
+         """Run the API server."""
+         uvicorn.run(self.app, host=host, port=port, **kwargs)
+
+
+ # Specialized API wrapper for Hierarchical RAG workflows
+ class HierarchicalRAGAPI(WorkflowAPI):
+     """
+     Specialized API wrapper for Hierarchical RAG workflows.
+
+     Provides RAG-specific endpoints and models for better developer experience.
+     """
+
+     def __init__(self, workflow: WorkflowBuilder, **kwargs):
+         super().__init__(workflow, **kwargs)
+         self._setup_rag_routes()
+
+     def _setup_rag_routes(self):
+         """Setup RAG-specific routes."""
+
+         class Document(BaseModel):
+             id: str
+             title: str
+             content: str
+
+         class RAGQuery(BaseModel):
+             query: str
+             top_k: int = 3
+             similarity_method: str = "cosine"
+             temperature: float = 0.7
+             max_tokens: int = 500
+
+         class RAGResponse(BaseModel):
+             answer: str
+             sources: List[Dict[str, Any]]
+             query: str
+             execution_time: float
+
+         @self.app.post("/documents")
+         async def add_documents(documents: List[Document]):
+             """Add documents to the knowledge base."""
+             # This would integrate with document storage
+             return {"message": f"Added {len(documents)} documents"}
+
+         @self.app.post("/query", response_model=RAGResponse)
+         async def query_rag(request: RAGQuery):
+             """Query the RAG system."""
+             import time
+
+             start_time = time.time()
+
+             # Transform to workflow format
+             workflow_request = WorkflowRequest(
+                 inputs={
+                     "query": request.query,
+                     "config": {
+                         "relevance_scorer": {
+                             "top_k": request.top_k,
+                             "similarity_method": request.similarity_method,
+                         },
+                         "llm_agent": {
+                             "temperature": request.temperature,
+                             "max_tokens": request.max_tokens,
+                         },
+                     },
+                 }
+             )
+
+             result = await self._execute_sync(workflow_request)
+
+             # Extract RAG-specific outputs
+             outputs = result.outputs
+             answer = (
+                 outputs.get("llm_response", {})
+                 .get("choices", [{}])[0]
+                 .get("message", {})
+                 .get("content", "")
+             )
+             sources = outputs.get("relevant_chunks", [])
+
+             return RAGResponse(
+                 answer=answer,
+                 sources=sources,
+                 query=request.query,
+                 execution_time=time.time() - start_time,
+             )
+
+
+ # Factory function for creating API wrappers
+ def create_workflow_api(
+     workflow: WorkflowBuilder, api_type: str = "generic", **kwargs
+ ) -> WorkflowAPI:
+     """
+     Factory function to create appropriate API wrapper.
+
+     Args:
+         workflow: The workflow to wrap
+         api_type: Type of API wrapper ("generic", "rag", etc.)
+         **kwargs: Additional arguments for API initialization
+
+     Returns:
+         Configured WorkflowAPI instance
+
+     Example:
+         >>> api = create_workflow_api(my_workflow, api_type="rag")
+         >>> api.run(port=8000)
+     """
+     api_classes = {
+         "generic": WorkflowAPI,
+         "rag": HierarchicalRAGAPI,
+     }
+
+     api_class = api_classes.get(api_type, WorkflowAPI)
+     return api_class(workflow, **kwargs)
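
A hedged smoke test for the module above: it exercises `/health`, `/workflow/info`, and the sync `/execute` path in-process via FastAPI's `TestClient` instead of starting uvicorn. It assumes `Workflow("id", name=...)` constructs an empty workflow (as in the README examples), that `WorkflowAPI` can read `workflow_id` and `version` off the instance, and that `Workflow` exposes the networkx-style `nodes()`/`edges()`/degree interface that `/workflow/info` expects. `httpx` must be installed for `TestClient`.

```python
from fastapi.testclient import TestClient

from kailash.api.workflow_api import WorkflowAPI
from kailash.workflow import Workflow

# Wrap an empty, illustrative workflow.
api = WorkflowAPI(Workflow("smoke_test", name="Smoke Test"))
client = TestClient(api.app)

# Health check and metadata come straight from the wrapper, no runtime needed.
assert client.get("/health").json() == {"status": "healthy", "workflow": "smoke_test"}
info = client.get("/workflow/info").json()
print(info["id"], info["nodes"])  # node/edge lists are empty for an empty workflow

# Sync execution of an empty workflow should return an (empty) outputs dict.
resp = client.post("/execute", json={"inputs": {}, "mode": "sync"})
print(resp.status_code, resp.json())
```
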
@@ -428,7 +428,6 @@ class HTTPRequestNode(Node):
          self.logger.info(f"Making {method} request to {url}")

          response = None
-         last_error = None

          for attempt in range(retry_count + 1):
              if attempt > 0:
@@ -453,7 +452,6 @@ class HTTPRequestNode(Node):
                      break

              except requests.RequestException as e:
-                 last_error = e
                  self.logger.warning(f"Request failed: {str(e)}")

                  # Last attempt, no more retries
@@ -779,7 +777,6 @@ class AsyncHTTPRequestNode(AsyncNode):
          self.logger.info(f"Making async {method} request to {url}")

          response = None
-         last_error = None

          for attempt in range(retry_count + 1):
              if attempt > 0:
@@ -860,7 +857,6 @@ class AsyncHTTPRequestNode(AsyncNode):
                  return result

              except (aiohttp.ClientError, asyncio.TimeoutError) as e:
-                 last_error = e
                  self.logger.warning(f"Async request failed: {str(e)}")

                  # Last attempt, no more retries
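
For context on these four hunks: `last_error` was assigned on every failed attempt but never read, because the final attempt re-raises the exception rather than consulting a stored copy. A stripped-down sketch of the same retry shape (illustrative names, not the node's actual code) shows why the holder variable is dead:

```python
import time

import requests


def request_with_retries(
    method: str, url: str, retry_count: int = 3, backoff: float = 0.5, **kwargs
) -> requests.Response:
    """Same control flow as HTTPRequestNode's loop: on the last attempt the
    exception simply propagates, so nothing needs to remember it."""
    for attempt in range(retry_count + 1):
        if attempt > 0:
            time.sleep(backoff * attempt)  # back off before retrying
        try:
            return requests.request(method, url, **kwargs)
        except requests.RequestException:
            if attempt == retry_count:
                raise  # last attempt, no more retries
    raise AssertionError("unreachable")
```
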