kailash 0.1.2__tar.gz → 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. {kailash-0.1.2/src/kailash.egg-info → kailash-0.1.4}/PKG-INFO +198 -27
  2. {kailash-0.1.2 → kailash-0.1.4}/README.md +195 -25
  3. {kailash-0.1.2 → kailash-0.1.4}/pyproject.toml +3 -2
  4. {kailash-0.1.2 → kailash-0.1.4}/setup.py +1 -1
  5. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/__init__.py +1 -1
  6. kailash-0.1.4/src/kailash/api/__init__.py +17 -0
  7. kailash-0.1.4/src/kailash/api/gateway.py +394 -0
  8. kailash-0.1.4/src/kailash/api/mcp_integration.py +478 -0
  9. kailash-0.1.4/src/kailash/api/workflow_api.py +399 -0
  10. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/__init__.py +4 -4
  11. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/agents.py +4 -4
  12. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/ai_providers.py +18 -22
  13. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/embedding_generator.py +34 -38
  14. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/llm_agent.py +351 -356
  15. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/http.py +0 -4
  16. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/rest.py +1 -1
  17. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/base.py +60 -64
  18. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/code/python.py +61 -42
  19. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/__init__.py +10 -10
  20. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/readers.py +27 -29
  21. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/retrieval.py +1 -1
  22. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/sharepoint_graph.py +23 -25
  23. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/sql.py +27 -29
  24. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/vector_db.py +2 -2
  25. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/writers.py +41 -44
  26. kailash-0.1.4/src/kailash/nodes/logic/__init__.py +13 -0
  27. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/logic/async_operations.py +14 -14
  28. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/logic/operations.py +18 -22
  29. kailash-0.1.4/src/kailash/nodes/logic/workflow.py +439 -0
  30. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/mcp/client.py +29 -33
  31. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/mcp/resource.py +1 -1
  32. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/mcp/server.py +10 -4
  33. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/transform/formatters.py +1 -1
  34. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/transform/processors.py +5 -3
  35. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/docker.py +2 -0
  36. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/metrics_collector.py +6 -7
  37. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/models.py +0 -20
  38. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/storage/database.py +4 -4
  39. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/storage/filesystem.py +0 -1
  40. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/utils/export.py +2 -2
  41. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/utils/templates.py +16 -16
  42. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/visualization/performance.py +7 -7
  43. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/visualization/reports.py +1 -1
  44. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/graph.py +4 -4
  45. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/mock_registry.py +1 -1
  46. {kailash-0.1.2 → kailash-0.1.4/src/kailash.egg-info}/PKG-INFO +198 -27
  47. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/SOURCES.txt +5 -0
  48. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/requires.txt +2 -1
  49. kailash-0.1.2/src/kailash/nodes/logic/__init__.py +0 -6
  50. {kailash-0.1.2 → kailash-0.1.4}/LICENSE +0 -0
  51. {kailash-0.1.2 → kailash-0.1.4}/MANIFEST.in +0 -0
  52. {kailash-0.1.2 → kailash-0.1.4}/setup.cfg +0 -0
  53. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/__main__.py +0 -0
  54. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/cli/__init__.py +0 -0
  55. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/cli/commands.py +0 -0
  56. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/manifest.py +0 -0
  57. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/__init__.py +0 -0
  58. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/ai/models.py +0 -0
  59. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/__init__.py +0 -0
  60. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/auth.py +0 -0
  61. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/graphql.py +0 -0
  62. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/api/rate_limiting.py +0 -0
  63. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/base_async.py +0 -0
  64. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/code/__init__.py +0 -0
  65. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/sources.py +0 -0
  66. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/data/streaming.py +0 -0
  67. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/mcp/__init__.py +0 -0
  68. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/transform/__init__.py +0 -0
  69. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/nodes/transform/chunkers.py +0 -0
  70. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/__init__.py +0 -0
  71. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/async_local.py +0 -0
  72. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/local.py +0 -0
  73. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/parallel.py +0 -0
  74. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/runner.py +0 -0
  75. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/runtime/testing.py +0 -0
  76. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/sdk_exceptions.py +0 -0
  77. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/__init__.py +0 -0
  78. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/manager.py +0 -0
  79. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/storage/__init__.py +0 -0
  80. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/tracking/storage/base.py +0 -0
  81. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/utils/__init__.py +0 -0
  82. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/visualization/__init__.py +0 -0
  83. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/visualization/api.py +0 -0
  84. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/visualization/dashboard.py +0 -0
  85. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/__init__.py +0 -0
  86. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/builder.py +0 -0
  87. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/mermaid_visualizer.py +0 -0
  88. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/runner.py +0 -0
  89. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/state.py +0 -0
  90. {kailash-0.1.2 → kailash-0.1.4}/src/kailash/workflow/visualization.py +0 -0
  91. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/dependency_links.txt +0 -0
  92. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/entry_points.txt +0 -0
  93. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/not-zip-safe +0 -0
  94. {kailash-0.1.2 → kailash-0.1.4}/src/kailash.egg-info/top_level.txt +0 -0
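The comparison below can be reproduced locally from the published sdists. A minimal sketch, assuming network access and a working `pip`; the `old`/`new` directory names and the choice of `README.md` are illustrative, not part of this diff:

```python
# Minimal sketch: download both kailash sdists from PyPI and diff one of the
# files listed above. Directory names ("old"/"new") are illustrative.
import difflib
import pathlib
import subprocess
import sys
import tarfile


def fetch_sdist(version: str, dest: str) -> pathlib.Path:
    """Download and unpack the kailash sdist for `version` into `dest`."""
    subprocess.run(
        [sys.executable, "-m", "pip", "download", f"kailash=={version}",
         "--no-deps", "--no-binary", ":all:", "-d", dest],
        check=True,
    )
    archive = next(pathlib.Path(dest).glob(f"kailash-{version}.tar.gz"))
    with tarfile.open(archive) as tar:
        tar.extractall(dest)
    return pathlib.Path(dest) / f"kailash-{version}"


old_root = fetch_sdist("0.1.2", "old")
new_root = fetch_sdist("0.1.4", "new")

# Compare one of the 94 changed files, e.g. the README shown in this diff.
old_lines = (old_root / "README.md").read_text().splitlines(keepends=True)
new_lines = (new_root / "README.md").read_text().splitlines(keepends=True)
sys.stdout.writelines(
    difflib.unified_diff(old_lines, new_lines,
                         "kailash-0.1.2/README.md", "kailash-0.1.4/README.md")
)
```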
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: kailash
- Version: 0.1.2
+ Version: 0.1.4
  Summary: Python SDK for the Kailash container-node architecture
  Home-page: https://github.com/integrum/kailash-python-sdk
  Author: Integrum
@@ -41,7 +41,8 @@ Requires-Dist: autodoc>=0.5.0
  Requires-Dist: myst-parser>=4.0.1
  Requires-Dist: black>=25.1.0
  Requires-Dist: psutil>=7.0.0
- Requires-Dist: fastapi[all]>=0.115.12
+ Requires-Dist: fastapi>=0.115.12
+ Requires-Dist: uvicorn[standard]>=0.31.0
  Requires-Dist: pytest-asyncio>=1.0.0
  Requires-Dist: pre-commit>=4.2.0
  Requires-Dist: twine>=6.1.0
@@ -65,7 +66,7 @@ Dynamic: requires-python
  <a href="https://pepy.tech/project/kailash"><img src="https://static.pepy.tech/badge/kailash" alt="Downloads"></a>
  <img src="https://img.shields.io/badge/license-MIT-green.svg" alt="MIT License">
  <img src="https://img.shields.io/badge/code%20style-black-000000.svg" alt="Code style: black">
- <img src="https://img.shields.io/badge/tests-746%20passing-brightgreen.svg" alt="Tests: 746 passing">
+ <img src="https://img.shields.io/badge/tests-753%20passing-brightgreen.svg" alt="Tests: 753 passing">
  <img src="https://img.shields.io/badge/coverage-100%25-brightgreen.svg" alt="Coverage: 100%">
  </p>

@@ -89,6 +90,8 @@ Dynamic: requires-python
  - ⚡ **Fast Installation**: Uses `uv` for lightning-fast Python package management
  - 🤖 **AI-Powered**: Complete LLM agents, embeddings, and hierarchical RAG architecture
  - 🧠 **Retrieval-Augmented Generation**: Full RAG pipeline with intelligent document processing
+ - 🌐 **REST API Wrapper**: Expose any workflow as a production-ready API in 3 lines
+ - 🚪 **Multi-Workflow Gateway**: Manage multiple workflows through unified API with MCP integration

  ## 🎯 Who Is This For?

@@ -122,7 +125,7 @@ uv sync

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.data import CSVReader
+ from kailash.nodes.data import CSVReaderNode
  from kailash.nodes.code import PythonCodeNode
  from kailash.runtime.local import LocalRuntime
  import pandas as pd
@@ -131,7 +134,7 @@ import pandas as pd
  workflow = Workflow("customer_analysis", name="customer_analysis")

  # Add data reader
- reader = CSVReader(file_path="customers.csv")
+ reader = CSVReaderNode(file_path="customers.csv")
  workflow.add_node("read_customers", reader)

  # Add custom processing using Python code
@@ -169,7 +172,7 @@ workflow.save("customer_analysis.yaml", format="yaml")

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.data import SharePointGraphReader, CSVWriter
+ from kailash.nodes.data import SharePointGraphReader, CSVWriterNode
  import os

  # Create workflow for SharePoint file processing
@@ -180,7 +183,7 @@ sharepoint = SharePointGraphReader()
  workflow.add_node("read_sharepoint", sharepoint)

  # Process downloaded files
- csv_writer = CSVWriter(file_path="sharepoint_output.csv")
+ csv_writer = CSVWriterNode(file_path="sharepoint_output.csv")
  workflow.add_node("save_locally", csv_writer)

  # Connect nodes
@@ -208,8 +211,8 @@ results, run_id = runtime.execute(workflow, inputs=inputs)

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.ai.embedding_generator import EmbeddingGenerator
- from kailash.nodes.ai.llm_agent import LLMAgent
+ from kailash.nodes.ai.embedding_generator import EmbeddingGeneratorNode
+ from kailash.nodes.ai.llm_agent import LLMAgentNode
  from kailash.nodes.data.sources import DocumentSourceNode, QuerySourceNode
  from kailash.nodes.data.retrieval import RelevanceScorerNode
  from kailash.nodes.transform.chunkers import HierarchicalChunkerNode
@@ -230,17 +233,17 @@ chunk_text_extractor = ChunkTextExtractorNode()
  query_text_wrapper = QueryTextWrapperNode()

  # AI processing with Ollama
- chunk_embedder = EmbeddingGenerator(
+ chunk_embedder = EmbeddingGeneratorNode(
  provider="ollama", model="nomic-embed-text", operation="embed_batch"
  )
- query_embedder = EmbeddingGenerator(
+ query_embedder = EmbeddingGeneratorNode(
  provider="ollama", model="nomic-embed-text", operation="embed_batch"
  )

  # Retrieval and response generation
  relevance_scorer = RelevanceScorerNode()
  context_formatter = ContextFormatterNode()
- llm_agent = LLMAgent(provider="ollama", model="llama3.2", temperature=0.7)
+ llm_agent = LLMAgentNode(provider="ollama", model="llama3.2", temperature=0.7)

  # Add all nodes to workflow
  for name, node in {
@@ -273,6 +276,140 @@ results, run_id = runtime.execute(workflow)
  print("RAG Response:", results["llm_agent"]["response"])
  ```

+ ### Workflow API Wrapper - Expose Workflows as REST APIs
+
+ Transform any Kailash workflow into a production-ready REST API in just 3 lines of code:
+
+ ```python
+ from kailash.api.workflow_api import WorkflowAPI
+
+ # Take any workflow and expose it as an API
+ api = WorkflowAPI(workflow)
+ api.run(port=8000) # That's it! Your workflow is now a REST API
+ ```
+
+ #### Features
+
+ - **Automatic REST Endpoints**:
+ - `POST /execute` - Execute workflow with inputs
+ - `GET /workflow/info` - Get workflow metadata
+ - `GET /health` - Health check endpoint
+ - Automatic OpenAPI docs at `/docs`
+
+ - **Multiple Execution Modes**:
+ ```python
+ # Synchronous execution (wait for results)
+ curl -X POST http://localhost:8000/execute \
+ -d '{"inputs": {...}, "mode": "sync"}'
+
+ # Asynchronous execution (get execution ID)
+ curl -X POST http://localhost:8000/execute \
+ -d '{"inputs": {...}, "mode": "async"}'
+
+ # Check async status
+ curl http://localhost:8000/status/{execution_id}
+ ```
+
+ - **Specialized APIs** for specific domains:
+ ```python
+ from kailash.api.workflow_api import create_workflow_api
+
+ # Create a RAG-specific API with custom endpoints
+ api = create_workflow_api(rag_workflow, api_type="rag")
+ # Adds /documents and /query endpoints
+ ```
+
+ - **Production Ready**:
+ ```python
+ # Development
+ api.run(reload=True, log_level="debug")
+
+ # Production with SSL
+ api.run(
+ host="0.0.0.0",
+ port=443,
+ ssl_keyfile="key.pem",
+ ssl_certfile="cert.pem",
+ workers=4
+ )
+ ```
+
+ See the [API demo example](examples/integration_examples/integration_api_demo.py) for complete usage patterns.
+
+ ### Multi-Workflow API Gateway - Manage Multiple Workflows
+
+ Run multiple workflows through a single unified API gateway with dynamic routing and MCP integration:
+
+ ```python
+ from kailash.api.gateway import WorkflowAPIGateway
+ from kailash.api.mcp_integration import MCPIntegration
+
+ # Create gateway
+ gateway = WorkflowAPIGateway(
+ title="Enterprise Platform",
+ description="Unified API for all workflows"
+ )
+
+ # Register multiple workflows
+ gateway.register_workflow("sales", sales_workflow)
+ gateway.register_workflow("analytics", analytics_workflow)
+ gateway.register_workflow("reports", reporting_workflow)
+
+ # Add AI-powered tools via MCP
+ mcp = MCPIntegration("ai_tools")
+ mcp.add_tool("analyze", analyze_function)
+ mcp.add_tool("predict", predict_function)
+ gateway.register_mcp_server("ai", mcp)
+
+ # Run unified server
+ gateway.run(port=8000)
+ ```
+
+ #### Gateway Features
+
+ - **Unified Access Point**: All workflows accessible through one server
+ - `/sales/execute` - Execute sales workflow
+ - `/analytics/execute` - Execute analytics workflow
+ - `/workflows` - List all available workflows
+ - `/health` - Check health of all services
+
+ - **MCP Integration**: AI-powered tools available to all workflows
+ ```python
+ # Use MCP tools in workflows
+ from kailash.api.mcp_integration import MCPToolNode
+
+ tool_node = MCPToolNode(
+ mcp_server="ai_tools",
+ tool_name="analyze"
+ )
+ workflow.add_node("ai_analysis", tool_node)
+ ```
+
+ - **Flexible Deployment Patterns**:
+ ```python
+ # Pattern 1: Single Gateway (most cases)
+ gateway.register_workflow("workflow1", wf1)
+ gateway.register_workflow("workflow2", wf2)
+
+ # Pattern 2: Hybrid (heavy workflows separate)
+ gateway.register_workflow("light", light_wf)
+ gateway.proxy_workflow("heavy", "http://gpu-service:8080")
+
+ # Pattern 3: High Availability
+ # Run multiple gateway instances behind load balancer
+
+ # Pattern 4: Kubernetes
+ # Deploy with horizontal pod autoscaling
+ ```
+
+ - **Production Features**:
+ - WebSocket support for real-time updates
+ - Health monitoring across all workflows
+ - Dynamic workflow registration/unregistration
+ - Built-in CORS and authentication support
+
+ See the [Gateway examples](examples/integration_examples/gateway_comprehensive_demo.py) for complete implementation patterns.
+
  ## 📚 Documentation

  | Resource | Description |
@@ -294,14 +431,14 @@ The SDK includes a rich set of pre-built nodes for common operations:
  <td width="50%">

  **Data Operations**
- - `CSVReader` - Read CSV files
- - `JSONReader` - Read JSON files
+ - `CSVReaderNode` - Read CSV files
+ - `JSONReaderNode` - Read JSON files
  - `DocumentSourceNode` - Sample document provider
  - `QuerySourceNode` - Sample query provider
  - `RelevanceScorerNode` - Multi-method similarity
  - `SQLDatabaseNode` - Query databases
- - `CSVWriter` - Write CSV files
- - `JSONWriter` - Write JSON files
+ - `CSVWriterNode` - Write CSV files
+ - `JSONWriterNode` - Write JSON files

  </td>
  <td width="50%">
@@ -316,14 +453,19 @@ The SDK includes a rich set of pre-built nodes for common operations:
  - `Filter` - Filter records
  - `Aggregator` - Aggregate data

+ **Logic Nodes**
+ - `SwitchNode` - Conditional routing
+ - `MergeNode` - Combine multiple inputs
+ - `WorkflowNode` - Wrap workflows as reusable nodes
+
  </td>
  </tr>
  <tr>
  <td width="50%">

  **AI/ML Nodes**
- - `LLMAgent` - Multi-provider LLM with memory & tools
- - `EmbeddingGenerator` - Vector embeddings with caching
+ - `LLMAgentNode` - Multi-provider LLM with memory & tools
+ - `EmbeddingGeneratorNode` - Vector embeddings with caching
  - `MCPClient/MCPServer` - Model Context Protocol
  - `TextClassifier` - Text classification
  - `SentimentAnalyzer` - Sentiment analysis
@@ -363,14 +505,14 @@ The SDK includes a rich set of pre-built nodes for common operations:
  #### Workflow Management
  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.logic import Switch
+ from kailash.nodes.logic import SwitchNode
  from kailash.nodes.transform import DataTransformer

  # Create complex workflows with branching logic
  workflow = Workflow("data_pipeline", name="data_pipeline")

- # Add conditional branching with Switch node
- switch = Switch()
+ # Add conditional branching with SwitchNode
+ switch = SwitchNode()
  workflow.add_node("route", switch)

  # Different paths based on validation
@@ -384,6 +526,35 @@ workflow.connect("route", "process_valid")
  workflow.connect("route", "handle_errors")
  ```

+ #### Hierarchical Workflow Composition
+ ```python
+ from kailash.workflow import Workflow
+ from kailash.nodes.logic import WorkflowNode
+ from kailash.runtime.local import LocalRuntime
+
+ # Create a reusable data processing workflow
+ inner_workflow = Workflow("data_processor", name="Data Processor")
+ # ... add nodes to inner workflow ...
+
+ # Wrap the workflow as a node
+ processor_node = WorkflowNode(
+ workflow=inner_workflow,
+ name="data_processor"
+ )
+
+ # Use in a larger workflow
+ main_workflow = Workflow("main", name="Main Pipeline")
+ main_workflow.add_node("process", processor_node)
+ main_workflow.add_node("analyze", analyzer_node)
+
+ # Connect workflows
+ main_workflow.connect("process", "analyze")
+
+ # Execute - parameters automatically mapped to inner workflow
+ runtime = LocalRuntime()
+ results, _ = runtime.execute(main_workflow)
+ ```
+
  #### Immutable State Management
  ```python
  from kailash.workflow import Workflow
@@ -667,13 +838,13 @@ chunk_text_extractor = ChunkTextExtractorNode()
  query_text_wrapper = QueryTextWrapperNode()

  # Create embedding generators
- chunk_embedder = EmbeddingGenerator(
+ chunk_embedder = EmbeddingGeneratorNode(
  provider="ollama",
  model="nomic-embed-text",
  operation="embed_batch"
  )

- query_embedder = EmbeddingGenerator(
+ query_embedder = EmbeddingGeneratorNode(
  provider="ollama",
  model="nomic-embed-text",
  operation="embed_batch"
@@ -684,7 +855,7 @@ relevance_scorer = RelevanceScorerNode(similarity_method="cosine")
  context_formatter = ContextFormatterNode()

  # Create LLM agent for final answer generation
- llm_agent = LLMAgent(
+ llm_agent = LLMAgentNode(
  provider="ollama",
  model="llama3.2",
  temperature=0.7,
@@ -803,10 +974,10 @@ kailash/
  The SDK features a unified provider architecture for AI capabilities:

  ```python
- from kailash.nodes.ai import LLMAgent, EmbeddingGenerator
+ from kailash.nodes.ai import LLMAgentNode, EmbeddingGeneratorNode

  # Multi-provider LLM support
- agent = LLMAgent()
+ agent = LLMAgentNode()
  result = agent.run(
  provider="ollama", # or "openai", "anthropic", "mock"
  model="llama3.1:8b-instruct-q8_0",
@@ -815,7 +986,7 @@ result = agent.run(
  )

  # Vector embeddings with the same providers
- embedder = EmbeddingGenerator()
+ embedder = EmbeddingGeneratorNode()
  embedding = embedder.run(
  provider="ollama", # Same providers support embeddings
  model="snowflake-arctic-embed2",