kailash 0.1.3__tar.gz → 0.1.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. {kailash-0.1.3/src/kailash.egg-info → kailash-0.1.5}/PKG-INFO +293 -29
  2. {kailash-0.1.3 → kailash-0.1.5}/README.md +292 -28
  3. {kailash-0.1.3 → kailash-0.1.5}/pyproject.toml +1 -1
  4. {kailash-0.1.3 → kailash-0.1.5}/setup.py +1 -1
  5. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/__init__.py +1 -1
  6. kailash-0.1.5/src/kailash/api/__init__.py +17 -0
  7. kailash-0.1.5/src/kailash/api/gateway.py +394 -0
  8. kailash-0.1.5/src/kailash/api/mcp_integration.py +478 -0
  9. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/api/workflow_api.py +29 -13
  10. kailash-0.1.5/src/kailash/nodes/ai/__init__.py +88 -0
  11. kailash-0.1.5/src/kailash/nodes/ai/a2a.py +1143 -0
  12. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/ai/agents.py +120 -6
  13. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/ai/ai_providers.py +224 -30
  14. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/ai/embedding_generator.py +34 -38
  15. kailash-0.1.5/src/kailash/nodes/ai/intelligent_agent_orchestrator.py +2114 -0
  16. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/ai/llm_agent.py +351 -356
  17. kailash-0.1.5/src/kailash/nodes/ai/self_organizing.py +1624 -0
  18. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/http.py +106 -25
  19. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/rest.py +116 -21
  20. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/base.py +60 -64
  21. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/code/python.py +61 -42
  22. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/__init__.py +10 -10
  23. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/readers.py +117 -66
  24. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/retrieval.py +1 -1
  25. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/sharepoint_graph.py +23 -25
  26. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/sql.py +24 -26
  27. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/writers.py +41 -44
  28. kailash-0.1.5/src/kailash/nodes/logic/__init__.py +13 -0
  29. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/logic/async_operations.py +60 -21
  30. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/logic/operations.py +43 -22
  31. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/logic/workflow.py +26 -18
  32. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/mcp/client.py +29 -33
  33. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/transform/__init__.py +8 -1
  34. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/transform/formatters.py +1 -1
  35. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/transform/processors.py +119 -4
  36. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/metrics_collector.py +6 -7
  37. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/utils/export.py +2 -2
  38. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/utils/templates.py +16 -16
  39. {kailash-0.1.3 → kailash-0.1.5/src/kailash.egg-info}/PKG-INFO +293 -29
  40. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/SOURCES.txt +5 -0
  41. kailash-0.1.3/src/kailash/api/__init__.py +0 -7
  42. kailash-0.1.3/src/kailash/nodes/ai/__init__.py +0 -52
  43. kailash-0.1.3/src/kailash/nodes/logic/__init__.py +0 -7
  44. {kailash-0.1.3 → kailash-0.1.5}/LICENSE +0 -0
  45. {kailash-0.1.3 → kailash-0.1.5}/MANIFEST.in +0 -0
  46. {kailash-0.1.3 → kailash-0.1.5}/setup.cfg +0 -0
  47. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/__main__.py +0 -0
  48. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/cli/__init__.py +0 -0
  49. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/cli/commands.py +0 -0
  50. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/manifest.py +0 -0
  51. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/__init__.py +0 -0
  52. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/ai/models.py +0 -0
  53. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/__init__.py +0 -0
  54. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/auth.py +0 -0
  55. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/graphql.py +0 -0
  56. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/api/rate_limiting.py +0 -0
  57. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/base_async.py +0 -0
  58. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/code/__init__.py +0 -0
  59. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/sources.py +0 -0
  60. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/streaming.py +0 -0
  61. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/data/vector_db.py +0 -0
  62. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/mcp/__init__.py +0 -0
  63. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/mcp/resource.py +0 -0
  64. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/mcp/server.py +0 -0
  65. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/nodes/transform/chunkers.py +0 -0
  66. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/__init__.py +0 -0
  67. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/async_local.py +0 -0
  68. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/docker.py +0 -0
  69. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/local.py +0 -0
  70. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/parallel.py +0 -0
  71. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/runner.py +0 -0
  72. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/runtime/testing.py +0 -0
  73. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/sdk_exceptions.py +0 -0
  74. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/__init__.py +0 -0
  75. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/manager.py +0 -0
  76. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/models.py +0 -0
  77. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/storage/__init__.py +0 -0
  78. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/storage/base.py +0 -0
  79. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/storage/database.py +0 -0
  80. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/tracking/storage/filesystem.py +0 -0
  81. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/utils/__init__.py +0 -0
  82. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/visualization/__init__.py +0 -0
  83. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/visualization/api.py +0 -0
  84. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/visualization/dashboard.py +0 -0
  85. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/visualization/performance.py +0 -0
  86. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/visualization/reports.py +0 -0
  87. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/__init__.py +0 -0
  88. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/builder.py +0 -0
  89. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/graph.py +0 -0
  90. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/mermaid_visualizer.py +0 -0
  91. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/mock_registry.py +0 -0
  92. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/runner.py +0 -0
  93. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/state.py +0 -0
  94. {kailash-0.1.3 → kailash-0.1.5}/src/kailash/workflow/visualization.py +0 -0
  95. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/dependency_links.txt +0 -0
  96. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/entry_points.txt +0 -0
  97. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/not-zip-safe +0 -0
  98. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/requires.txt +0 -0
  99. {kailash-0.1.3 → kailash-0.1.5}/src/kailash.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: kailash
- Version: 0.1.3
+ Version: 0.1.5
  Summary: Python SDK for the Kailash container-node architecture
  Home-page: https://github.com/integrum/kailash-python-sdk
  Author: Integrum
@@ -66,7 +66,7 @@ Dynamic: requires-python
  <a href="https://pepy.tech/project/kailash"><img src="https://static.pepy.tech/badge/kailash" alt="Downloads"></a>
  <img src="https://img.shields.io/badge/license-MIT-green.svg" alt="MIT License">
  <img src="https://img.shields.io/badge/code%20style-black-000000.svg" alt="Code style: black">
- <img src="https://img.shields.io/badge/tests-746%20passing-brightgreen.svg" alt="Tests: 746 passing">
+ <img src="https://img.shields.io/badge/tests-753%20passing-brightgreen.svg" alt="Tests: 753 passing">
  <img src="https://img.shields.io/badge/coverage-100%25-brightgreen.svg" alt="Coverage: 100%">
  </p>

@@ -91,6 +91,9 @@ Dynamic: requires-python
  - 🤖 **AI-Powered**: Complete LLM agents, embeddings, and hierarchical RAG architecture
  - 🧠 **Retrieval-Augmented Generation**: Full RAG pipeline with intelligent document processing
  - 🌐 **REST API Wrapper**: Expose any workflow as a production-ready API in 3 lines
+ - 🚪 **Multi-Workflow Gateway**: Manage multiple workflows through unified API with MCP integration
+ - 🤖 **Self-Organizing Agents**: Autonomous agent pools with intelligent team formation and convergence detection
+ - 🧠 **Agent-to-Agent Communication**: Shared memory pools and intelligent caching for coordinated multi-agent systems

  ## 🎯 Who Is This For?

@@ -124,7 +127,7 @@ uv sync

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.data import CSVReader
+ from kailash.nodes.data import CSVReaderNode
  from kailash.nodes.code import PythonCodeNode
  from kailash.runtime.local import LocalRuntime
  import pandas as pd
@@ -133,7 +136,7 @@ import pandas as pd
  workflow = Workflow("customer_analysis", name="customer_analysis")

  # Add data reader
- reader = CSVReader(file_path="customers.csv")
+ reader = CSVReaderNode(file_path="customers.csv")
  workflow.add_node("read_customers", reader)

  # Add custom processing using Python code
@@ -171,7 +174,7 @@ workflow.save("customer_analysis.yaml", format="yaml")

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.data import SharePointGraphReader, CSVWriter
+ from kailash.nodes.data import SharePointGraphReader, CSVWriterNode
  import os

  # Create workflow for SharePoint file processing
@@ -182,7 +185,7 @@ sharepoint = SharePointGraphReader()
  workflow.add_node("read_sharepoint", sharepoint)

  # Process downloaded files
- csv_writer = CSVWriter(file_path="sharepoint_output.csv")
+ csv_writer = CSVWriterNode(file_path="sharepoint_output.csv")
  workflow.add_node("save_locally", csv_writer)

  # Connect nodes
@@ -210,8 +213,8 @@ results, run_id = runtime.execute(workflow, inputs=inputs)

  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.ai.embedding_generator import EmbeddingGenerator
- from kailash.nodes.ai.llm_agent import LLMAgent
+ from kailash.nodes.ai.embedding_generator import EmbeddingGeneratorNode
+ from kailash.nodes.ai.llm_agent import LLMAgentNode
  from kailash.nodes.data.sources import DocumentSourceNode, QuerySourceNode
  from kailash.nodes.data.retrieval import RelevanceScorerNode
  from kailash.nodes.transform.chunkers import HierarchicalChunkerNode
@@ -232,17 +235,17 @@ chunk_text_extractor = ChunkTextExtractorNode()
  query_text_wrapper = QueryTextWrapperNode()

  # AI processing with Ollama
- chunk_embedder = EmbeddingGenerator(
+ chunk_embedder = EmbeddingGeneratorNode(
      provider="ollama", model="nomic-embed-text", operation="embed_batch"
  )
- query_embedder = EmbeddingGenerator(
+ query_embedder = EmbeddingGeneratorNode(
      provider="ollama", model="nomic-embed-text", operation="embed_batch"
  )

  # Retrieval and response generation
  relevance_scorer = RelevanceScorerNode()
  context_formatter = ContextFormatterNode()
- llm_agent = LLMAgent(provider="ollama", model="llama3.2", temperature=0.7)
+ llm_agent = LLMAgentNode(provider="ollama", model="llama3.2", temperature=0.7)

  # Add all nodes to workflow
  for name, node in {
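The hunks above (and the node catalog and provider-architecture hunks further down) follow one pattern: node classes gained a `Node` suffix in 0.1.5 (`CSVReader` → `CSVReaderNode`, `CSVWriter` → `CSVWriterNode`, `EmbeddingGenerator` → `EmbeddingGeneratorNode`, `LLMAgent` → `LLMAgentNode`). A minimal migration sketch, not part of the upstream diff, assuming only the class names changed and the constructor arguments shown in these hunks are otherwise unchanged:

```python
# Hypothetical migration sketch based only on the renames visible in this diff;
# constructor signatures are assumed unchanged between 0.1.3 and 0.1.5.

# 0.1.3
# from kailash.nodes.data import CSVReader, CSVWriter
# reader = CSVReader(file_path="customers.csv")

# 0.1.5
from kailash.nodes.data import CSVReaderNode, CSVWriterNode
from kailash.nodes.ai.llm_agent import LLMAgentNode
from kailash.nodes.ai.embedding_generator import EmbeddingGeneratorNode

reader = CSVReaderNode(file_path="customers.csv")          # was CSVReader
writer = CSVWriterNode(file_path="sharepoint_output.csv")  # was CSVWriter
llm_agent = LLMAgentNode(provider="ollama", model="llama3.2", temperature=0.7)  # was LLMAgent
embedder = EmbeddingGeneratorNode(
    provider="ollama", model="nomic-embed-text", operation="embed_batch"
)  # was EmbeddingGenerator
```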
@@ -335,6 +338,247 @@ api.run(port=8000) # That's it! Your workflow is now a REST API

  See the [API demo example](examples/integration_examples/integration_api_demo.py) for complete usage patterns.

+ ### Multi-Workflow API Gateway - Manage Multiple Workflows
+
+ Run multiple workflows through a single unified API gateway with dynamic routing and MCP integration:
+
+ ```python
+ from kailash.api.gateway import WorkflowAPIGateway
+ from kailash.api.mcp_integration import MCPIntegration
+
+ # Create gateway
+ gateway = WorkflowAPIGateway(
+     title="Enterprise Platform",
+     description="Unified API for all workflows"
+ )
+
+ # Register multiple workflows
+ gateway.register_workflow("sales", sales_workflow)
+ gateway.register_workflow("analytics", analytics_workflow)
+ gateway.register_workflow("reports", reporting_workflow)
+
+ # Add AI-powered tools via MCP
+ mcp = MCPIntegration("ai_tools")
+ mcp.add_tool("analyze", analyze_function)
+ mcp.add_tool("predict", predict_function)
+ gateway.register_mcp_server("ai", mcp)
+
+ # Run unified server
+ gateway.run(port=8000)
+ ```
+
+ #### Gateway Features
+
+ - **Unified Access Point**: All workflows accessible through one server
+   - `/sales/execute` - Execute sales workflow
+   - `/analytics/execute` - Execute analytics workflow
+   - `/workflows` - List all available workflows
+   - `/health` - Check health of all services
+
+ - **MCP Integration**: AI-powered tools available to all workflows
+   ```python
+   # Use MCP tools in workflows
+   from kailash.api.mcp_integration import MCPToolNode
+
+   tool_node = MCPToolNode(
+       mcp_server="ai_tools",
+       tool_name="analyze"
+   )
+   workflow.add_node("ai_analysis", tool_node)
+   ```
+
+ - **Flexible Deployment Patterns**:
+   ```python
+   # Pattern 1: Single Gateway (most cases)
+   gateway.register_workflow("workflow1", wf1)
+   gateway.register_workflow("workflow2", wf2)
+
+   # Pattern 2: Hybrid (heavy workflows separate)
+   gateway.register_workflow("light", light_wf)
+   gateway.proxy_workflow("heavy", "http://gpu-service:8080")
+
+   # Pattern 3: High Availability
+   # Run multiple gateway instances behind load balancer
+
+   # Pattern 4: Kubernetes
+   # Deploy with horizontal pod autoscaling
+   ```
+
+ - **Production Features**:
+   - WebSocket support for real-time updates
+   - Health monitoring across all workflows
+   - Dynamic workflow registration/unregistration
+   - Built-in CORS and authentication support
+
+ See the [Gateway examples](examples/integration_examples/gateway_comprehensive_demo.py) for complete implementation patterns.
+
+ ### Self-Organizing Agent Pools - Autonomous Multi-Agent Systems
+
+ Build intelligent agent systems that can autonomously form teams, share information, and solve complex problems collaboratively:
+
+ ```python
+ from kailash import Workflow
+ from kailash.runtime import LocalRuntime
+ from kailash.nodes.ai.intelligent_agent_orchestrator import (
+     OrchestrationManagerNode,
+     IntelligentCacheNode,
+     ConvergenceDetectorNode
+ )
+ from kailash.nodes.ai.self_organizing import (
+     AgentPoolManagerNode,
+     TeamFormationNode,
+     ProblemAnalyzerNode
+ )
+ from kailash.nodes.ai.a2a import SharedMemoryPoolNode, A2AAgentNode
+
+ # Create self-organizing agent workflow
+ workflow = Workflow("self_organizing_research")
+
+ # Shared infrastructure
+ memory_pool = SharedMemoryPoolNode(
+     memory_size_limit=1000,
+     attention_window=50
+ )
+ workflow.add_node("memory", memory_pool)
+
+ # Intelligent caching to prevent redundant operations
+ cache = IntelligentCacheNode(
+     ttl=3600,  # 1 hour cache
+     similarity_threshold=0.8,
+     max_entries=1000
+ )
+ workflow.add_node("cache", cache)
+
+ # Problem analysis and team formation
+ problem_analyzer = ProblemAnalyzerNode()
+ team_former = TeamFormationNode(
+     formation_strategy="capability_matching",
+     optimization_rounds=3
+ )
+ workflow.add_node("analyzer", problem_analyzer)
+ workflow.add_node("team_former", team_former)
+
+ # Self-organizing agent pool
+ pool_manager = AgentPoolManagerNode(
+     max_active_agents=20,
+     agent_timeout=120
+ )
+ workflow.add_node("pool", pool_manager)
+
+ # Convergence detection for stopping criteria
+ convergence = ConvergenceDetectorNode(
+     quality_threshold=0.85,
+     improvement_threshold=0.02,
+     max_iterations=10
+ )
+ workflow.add_node("convergence", convergence)
+
+ # Orchestration manager coordinates the entire system
+ orchestrator = OrchestrationManagerNode(
+     max_iterations=10,
+     quality_threshold=0.85,
+     parallel_execution=True
+ )
+ workflow.add_node("orchestrator", orchestrator)
+
+ # Execute with complex business problem
+ runtime = LocalRuntime()
+ result, _ = runtime.execute(workflow, parameters={
+     "orchestrator": {
+         "query": "Analyze market trends and develop growth strategy for fintech",
+         "agent_pool_size": 12,
+         "mcp_servers": [
+             {"name": "market_data", "command": "python", "args": ["-m", "market_mcp"]},
+             {"name": "financial", "command": "python", "args": ["-m", "finance_mcp"]},
+             {"name": "research", "command": "python", "args": ["-m", "research_mcp"]}
+         ],
+         "context": {
+             "domain": "fintech",
+             "depth": "comprehensive",
+             "output_format": "strategic_report"
+         }
+     }
+ })
+
+ print(f"Solution Quality: {result['orchestrator']['quality_score']:.2%}")
+ print(f"Agents Used: {result['orchestrator']['agents_deployed']}")
+ print(f"Iterations: {result['orchestrator']['iterations_completed']}")
+ print(f"Final Strategy: {result['orchestrator']['final_solution']['strategy']}")
+ ```
+
+ #### Key Self-Organizing Features
+
+ - **Autonomous Team Formation**: Agents automatically form optimal teams based on:
+   - Capability matching for skill-specific tasks
+   - Swarm-based formation for exploration
+   - Market-based allocation for resource constraints
+   - Hierarchical organization for complex problems
+
+ - **Intelligent Information Sharing**:
+   - **SharedMemoryPoolNode**: Selective attention mechanisms for relevant information
+   - **IntelligentCacheNode**: Semantic similarity detection prevents redundant operations
+   - **A2AAgentNode**: Direct agent-to-agent communication with context awareness
+
+ - **Convergence Detection**: Automatic termination when:
+   - Solution quality exceeds threshold (e.g., 85% confidence)
+   - Improvement rate drops below minimum (e.g., <2% per iteration)
+   - Maximum iterations reached
+   - Time limits exceeded
+
+ - **MCP Integration**: Agents can access external tools and data sources:
+   - File systems, databases, APIs
+   - Web scraping and research tools
+   - Specialized domain knowledge bases
+   - Real-time data streams
+
+ - **Performance Optimization**:
+   - Multi-level caching strategies
+   - Parallel agent execution
+   - Resource management and monitoring
+   - Cost tracking for API usage
+
+ See the [Self-Organizing Agents examples](examples/integration_examples/) for complete implementation patterns and the [Agent Systems Guide](docs/guides/self_organizing_agents.rst) for detailed documentation.
+
+ ### Zero-Code MCP Ecosystem - Visual Workflow Builder
+
+ Build and deploy workflows through an interactive web interface without writing any code:
+
+ ```python
+ from kailash.api.gateway import WorkflowAPIGateway
+ from kailash.api.mcp_integration import MCPServerRegistry
+
+ # Run the MCP ecosystem demo
+ # cd examples/integration_examples
+ # ./run_ecosystem.sh
+
+ # Or run programmatically:
+ python examples/integration_examples/mcp_ecosystem_demo.py
+ ```
+
+ #### Features
+
+ - **Drag-and-Drop Builder**: Visual interface for creating workflows
+   - Drag nodes from palette (CSV Reader, Python Code, JSON Writer, etc.)
+   - Drop onto canvas to build workflows
+   - Deploy with one click
+
+ - **Live Dashboard**: Real-time monitoring and statistics
+   - Connected MCP server status
+   - Running workflow count
+   - Execution logs with timestamps
+
+ - **Pre-built Templates**: One-click deployment
+   - GitHub → Slack Notifier
+   - Data Processing Pipeline (CSV → Transform → JSON)
+   - AI Research Assistant
+
+ - **Technology Stack**: Lightweight and fast
+   - Backend: FastAPI + Kailash SDK
+   - Frontend: Vanilla HTML/CSS/JavaScript (no frameworks)
+   - Zero build process required
+
+ See the [MCP Ecosystem example](examples/integration_examples/) for the complete zero-code workflow deployment platform.
+
  ## 📚 Documentation

  | Resource | Description |
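The gateway section added above exposes each registered workflow under its own prefix (`/sales/execute`, `/analytics/execute`), plus `/workflows` and `/health` for discovery and monitoring. A hedged client-side sketch, assuming the gateway from that snippet is running locally on port 8000; the request and response schemas for the execute endpoints are not shown in this diff, so the payload below is illustrative only:

```python
# Hypothetical client calls against the gateway shown above; assumes it is
# running locally via gateway.run(port=8000). The execute payload shape is an
# assumption -- the README diff does not show the request schema.
import requests

BASE = "http://localhost:8000"

# List registered workflows and check health of all services
print(requests.get(f"{BASE}/workflows").json())
print(requests.get(f"{BASE}/health").json())

# Execute the workflow registered as "sales" via gateway.register_workflow("sales", ...)
resp = requests.post(f"{BASE}/sales/execute", json={"inputs": {"region": "EMEA"}})
resp.raise_for_status()
print(resp.json())
```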
@@ -356,14 +600,14 @@ The SDK includes a rich set of pre-built nodes for common operations:
  <td width="50%">

  **Data Operations**
- - `CSVReader` - Read CSV files
- - `JSONReader` - Read JSON files
+ - `CSVReaderNode` - Read CSV files
+ - `JSONReaderNode` - Read JSON files
  - `DocumentSourceNode` - Sample document provider
  - `QuerySourceNode` - Sample query provider
  - `RelevanceScorerNode` - Multi-method similarity
  - `SQLDatabaseNode` - Query databases
- - `CSVWriter` - Write CSV files
- - `JSONWriter` - Write JSON files
+ - `CSVWriterNode` - Write CSV files
+ - `JSONWriterNode` - Write JSON files

  </td>
  <td width="50%">
@@ -379,8 +623,8 @@ The SDK includes a rich set of pre-built nodes for common operations:
  - `Aggregator` - Aggregate data

  **Logic Nodes**
- - `Switch` - Conditional routing
- - `Merge` - Combine multiple inputs
+ - `SwitchNode` - Conditional routing
+ - `MergeNode` - Combine multiple inputs
  - `WorkflowNode` - Wrap workflows as reusable nodes

  </td>
@@ -389,13 +633,28 @@ The SDK includes a rich set of pre-built nodes for common operations:
  <td width="50%">

  **AI/ML Nodes**
- - `LLMAgent` - Multi-provider LLM with memory & tools
- - `EmbeddingGenerator` - Vector embeddings with caching
+ - `LLMAgentNode` - Multi-provider LLM with memory & tools
+ - `EmbeddingGeneratorNode` - Vector embeddings with caching
  - `MCPClient/MCPServer` - Model Context Protocol
  - `TextClassifier` - Text classification
  - `SentimentAnalyzer` - Sentiment analysis
  - `NamedEntityRecognizer` - NER extraction

+ **Self-Organizing Agent Nodes**
+ - `SharedMemoryPoolNode` - Agent memory sharing
+ - `A2AAgentNode` - Agent-to-agent communication
+ - `A2ACoordinatorNode` - Multi-agent coordination
+ - `IntelligentCacheNode` - Semantic caching system
+ - `MCPAgentNode` - MCP-enabled agents
+ - `QueryAnalysisNode` - Query complexity analysis
+ - `OrchestrationManagerNode` - System orchestration
+ - `ConvergenceDetectorNode` - Solution convergence
+ - `AgentPoolManagerNode` - Agent pool management
+ - `ProblemAnalyzerNode` - Problem decomposition
+ - `TeamFormationNode` - Optimal team creation
+ - `SolutionEvaluatorNode` - Multi-criteria evaluation
+ - `SelfOrganizingAgentNode` - Adaptive individual agents
+
  </td>
  <td width="50%">

@@ -430,14 +689,14 @@ The SDK includes a rich set of pre-built nodes for common operations:
  #### Workflow Management
  ```python
  from kailash.workflow import Workflow
- from kailash.nodes.logic import Switch
+ from kailash.nodes.logic import SwitchNode
  from kailash.nodes.transform import DataTransformer

  # Create complex workflows with branching logic
  workflow = Workflow("data_pipeline", name="data_pipeline")

- # Add conditional branching with Switch node
- switch = Switch()
+ # Add conditional branching with SwitchNode
+ switch = SwitchNode()
  workflow.add_node("route", switch)

  # Different paths based on validation
@@ -763,13 +1022,13 @@ chunk_text_extractor = ChunkTextExtractorNode()
  query_text_wrapper = QueryTextWrapperNode()

  # Create embedding generators
- chunk_embedder = EmbeddingGenerator(
+ chunk_embedder = EmbeddingGeneratorNode(
      provider="ollama",
      model="nomic-embed-text",
      operation="embed_batch"
  )

- query_embedder = EmbeddingGenerator(
+ query_embedder = EmbeddingGeneratorNode(
      provider="ollama",
      model="nomic-embed-text",
      operation="embed_batch"
@@ -780,7 +1039,7 @@ relevance_scorer = RelevanceScorerNode(similarity_method="cosine")
  context_formatter = ContextFormatterNode()

  # Create LLM agent for final answer generation
- llm_agent = LLMAgent(
+ llm_agent = LLMAgentNode(
      provider="ollama",
      model="llama3.2",
      temperature=0.7,
@@ -899,10 +1158,10 @@ kailash/
  The SDK features a unified provider architecture for AI capabilities:

  ```python
- from kailash.nodes.ai import LLMAgent, EmbeddingGenerator
+ from kailash.nodes.ai import LLMAgentNode, EmbeddingGeneratorNode

  # Multi-provider LLM support
- agent = LLMAgent()
+ agent = LLMAgentNode()
  result = agent.run(
      provider="ollama",  # or "openai", "anthropic", "mock"
      model="llama3.1:8b-instruct-q8_0",
@@ -911,7 +1170,7 @@ result = agent.run(
  )

  # Vector embeddings with the same providers
- embedder = EmbeddingGenerator()
+ embedder = EmbeddingGeneratorNode()
  embedding = embedder.run(
      provider="ollama",  # Same providers support embeddings
      model="snowflake-arctic-embed2",
@@ -1029,7 +1288,7 @@ pre-commit run pytest-check
  <td width="40%">

  ### ✅ Completed
- - Core node system with 15+ node types
+ - Core node system with 66+ node types
  - Workflow builder with DAG validation
  - Local & async execution engines
  - Task tracking with metrics
@@ -1040,6 +1299,11 @@ pre-commit run pytest-check
  - API integration with rate limiting
  - OAuth 2.0 authentication
  - SharePoint Graph API integration
+ - **Self-organizing agent pools with 13 specialized nodes**
+ - **Agent-to-agent communication and shared memory**
+ - **Intelligent caching and convergence detection**
+ - **MCP integration for external tool access**
+ - **Multi-strategy team formation algorithms**
  - **Real-time performance metrics collection**
  - **Performance visualization dashboards**
  - **Real-time monitoring dashboard with WebSocket streaming**