kailash 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. kailash/__init__.py +35 -5
  2. kailash/access_control.py +64 -46
  3. kailash/adapters/__init__.py +5 -0
  4. kailash/adapters/mcp_platform_adapter.py +273 -0
  5. kailash/api/workflow_api.py +34 -3
  6. kailash/channels/__init__.py +21 -0
  7. kailash/channels/api_channel.py +409 -0
  8. kailash/channels/base.py +271 -0
  9. kailash/channels/cli_channel.py +661 -0
  10. kailash/channels/event_router.py +496 -0
  11. kailash/channels/mcp_channel.py +648 -0
  12. kailash/channels/session.py +423 -0
  13. kailash/mcp_server/discovery.py +57 -18
  14. kailash/middleware/communication/api_gateway.py +23 -3
  15. kailash/middleware/communication/realtime.py +83 -0
  16. kailash/middleware/core/agent_ui.py +1 -1
  17. kailash/middleware/gateway/storage_backends.py +393 -0
  18. kailash/middleware/mcp/enhanced_server.py +22 -16
  19. kailash/nexus/__init__.py +21 -0
  20. kailash/nexus/cli/__init__.py +5 -0
  21. kailash/nexus/cli/__main__.py +6 -0
  22. kailash/nexus/cli/main.py +176 -0
  23. kailash/nexus/factory.py +413 -0
  24. kailash/nexus/gateway.py +545 -0
  25. kailash/nodes/__init__.py +8 -5
  26. kailash/nodes/ai/iterative_llm_agent.py +988 -17
  27. kailash/nodes/ai/llm_agent.py +29 -9
  28. kailash/nodes/api/__init__.py +2 -2
  29. kailash/nodes/api/monitoring.py +1 -1
  30. kailash/nodes/base.py +29 -5
  31. kailash/nodes/base_async.py +54 -14
  32. kailash/nodes/code/async_python.py +1 -1
  33. kailash/nodes/code/python.py +50 -6
  34. kailash/nodes/data/async_sql.py +90 -0
  35. kailash/nodes/data/bulk_operations.py +939 -0
  36. kailash/nodes/data/query_builder.py +373 -0
  37. kailash/nodes/data/query_cache.py +512 -0
  38. kailash/nodes/monitoring/__init__.py +10 -0
  39. kailash/nodes/monitoring/deadlock_detector.py +964 -0
  40. kailash/nodes/monitoring/performance_anomaly.py +1078 -0
  41. kailash/nodes/monitoring/race_condition_detector.py +1151 -0
  42. kailash/nodes/monitoring/transaction_metrics.py +790 -0
  43. kailash/nodes/monitoring/transaction_monitor.py +931 -0
  44. kailash/nodes/security/behavior_analysis.py +414 -0
  45. kailash/nodes/system/__init__.py +17 -0
  46. kailash/nodes/system/command_parser.py +820 -0
  47. kailash/nodes/transaction/__init__.py +48 -0
  48. kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
  49. kailash/nodes/transaction/saga_coordinator.py +652 -0
  50. kailash/nodes/transaction/saga_state_storage.py +411 -0
  51. kailash/nodes/transaction/saga_step.py +467 -0
  52. kailash/nodes/transaction/transaction_context.py +756 -0
  53. kailash/nodes/transaction/two_phase_commit.py +978 -0
  54. kailash/nodes/transform/processors.py +17 -1
  55. kailash/nodes/validation/__init__.py +21 -0
  56. kailash/nodes/validation/test_executor.py +532 -0
  57. kailash/nodes/validation/validation_nodes.py +447 -0
  58. kailash/resources/factory.py +1 -1
  59. kailash/runtime/access_controlled.py +9 -7
  60. kailash/runtime/async_local.py +84 -21
  61. kailash/runtime/local.py +21 -2
  62. kailash/runtime/parameter_injector.py +187 -31
  63. kailash/runtime/runner.py +6 -4
  64. kailash/runtime/testing.py +1 -1
  65. kailash/security.py +22 -3
  66. kailash/servers/__init__.py +32 -0
  67. kailash/servers/durable_workflow_server.py +430 -0
  68. kailash/servers/enterprise_workflow_server.py +522 -0
  69. kailash/servers/gateway.py +183 -0
  70. kailash/servers/workflow_server.py +293 -0
  71. kailash/utils/data_validation.py +192 -0
  72. kailash/workflow/builder.py +382 -15
  73. kailash/workflow/cyclic_runner.py +102 -10
  74. kailash/workflow/validation.py +144 -8
  75. kailash/workflow/visualization.py +99 -27
  76. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
  77. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
  78. kailash/workflow/builder_improvements.py +0 -207
  79. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
  80. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
  81. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
  82. {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,393 @@
1
+ """Storage backend implementations for middleware components."""
2
+
3
+ import asyncio
4
+ import json
5
+ from abc import ABC, abstractmethod
6
+ from datetime import UTC, datetime
7
+ from typing import Any, Dict, List, Optional
8
+
9
+ try:
10
+ import redis.asyncio as redis
11
+ except ImportError:
12
+ try:
13
+ import aioredis as redis
14
+ except ImportError:
15
+ redis = None
16
+ import asyncpg
17
+
18
+
19
class StorageBackend(ABC):
    """Interface that every key/value storage backend must implement.

    Concrete subclasses persist opaque byte payloads under string keys
    and provide async save/load/delete/list/close semantics.
    """

    @abstractmethod
    async def save(self, key: str, data: bytes) -> None:
        """Persist *data* under *key*."""

    @abstractmethod
    async def load(self, key: str) -> Optional[bytes]:
        """Return the bytes stored under *key*, or ``None`` if absent."""

    @abstractmethod
    async def delete(self, key: str) -> None:
        """Remove *key* and its value from storage."""

    @abstractmethod
    async def list_keys(self, prefix: str = "") -> List[str]:
        """Return stored keys, optionally restricted to those starting with *prefix*."""

    @abstractmethod
    async def close(self) -> None:
        """Release any underlying connections or resources."""
46
+
47
+
48
class RedisStorage(StorageBackend):
    """Redis-backed implementation of :class:`StorageBackend`.

    Byte payloads are stored under prefixed string keys; ``append``/``get``
    additionally maintain a JSON-encoded Redis list per key. The client
    connection is created lazily on first use.

    Note: the ``redis``-type annotations are strings on purpose — evaluating
    them eagerly would raise at import time when the optional redis client
    library is missing (the module-level fallback sets ``redis = None``).
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 6379,
        db: int = 0,
        password: Optional[str] = None,
        key_prefix: str = "kailash:",
    ):
        """Store connection settings; no connection is opened here.

        Raises:
            RuntimeError: if no async Redis client library is installed.
        """
        # Fail fast with a clear message instead of an obscure error the
        # first time the (missing) client module would be used.
        if redis is None:
            raise RuntimeError(
                "RedisStorage requires the 'redis' (redis.asyncio) or "
                "'aioredis' package to be installed"
            )
        self.host = host
        self.port = port
        self.db = db
        self.password = password
        self.key_prefix = key_prefix
        # Lazily created client (see class docstring for the string annotation).
        self._redis: Optional["redis.Redis"] = None

    async def _get_redis(self) -> "redis.Redis":
        """Create and cache the Redis client on first call."""
        if self._redis is None:
            self._redis = await redis.from_url(
                f"redis://{self.host}:{self.port}",
                db=self.db,
                password=self.password,
                decode_responses=False,  # values are handled as raw bytes
            )
        return self._redis

    def _make_key(self, key: str) -> str:
        """Return *key* namespaced with the configured prefix."""
        return f"{self.key_prefix}{key}"

    async def save(self, key: str, data: bytes) -> None:
        """Save *data* to Redis under the prefixed *key*."""
        client = await self._get_redis()
        await client.set(self._make_key(key), data)

    async def load(self, key: str) -> Optional[bytes]:
        """Load data from Redis; returns ``None`` when the key is absent."""
        client = await self._get_redis()
        return await client.get(self._make_key(key))

    async def delete(self, key: str) -> None:
        """Delete the value stored under *key* from Redis."""
        client = await self._get_redis()
        await client.delete(self._make_key(key))

    async def list_keys(self, prefix: str = "") -> List[str]:
        """Return stored keys (namespace prefix stripped) matching *prefix*."""
        client = await self._get_redis()
        pattern = self._make_key(f"{prefix}*")
        keys = await client.keys(pattern)

        # Keys come back as bytes (decode_responses=False); decode and strip
        # the namespace prefix before returning them.
        prefix_len = len(self.key_prefix)
        return [key.decode()[prefix_len:] for key in keys]

    async def append(self, key: str, data: List[Dict[str, Any]]) -> None:
        """Append JSON-serializable items to the Redis list at *key*."""
        if not data:
            return  # LPUSH with zero values is an error in Redis
        client = await self._get_redis()
        serialized_data = [json.dumps(item) for item in data]
        await client.lpush(self._make_key(key), *serialized_data)

    async def get(self, key: str) -> List[Dict[str, Any]]:
        """Return all items of the Redis list at *key*, oldest first."""
        client = await self._get_redis()
        data = await client.lrange(self._make_key(key), 0, -1)
        # LPUSH stores newest-first; reverse to restore insertion order.
        return [json.loads(item) for item in reversed(data)]

    async def close(self) -> None:
        """Close the Redis connection if it was opened."""
        if self._redis:
            await self._redis.close()
            self._redis = None
123
+
124
+
125
class PostgreSQLStorage(StorageBackend):
    """PostgreSQL-backed implementation of :class:`StorageBackend`.

    Values live in a single key/value table (``key`` primary key, ``data``
    BYTEA) that is created on first use; the connection pool is lazy.
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 5432,
        database: str = "kailash",
        username: str = "postgres",
        password: str = "",
        table_name: str = "storage",
    ):
        """Store connection settings; no connection is opened here.

        Raises:
            ValueError: if *table_name* is not a plain (optionally
                schema-qualified) SQL identifier.
        """
        # table_name is interpolated into SQL text below, so restrict it to
        # identifier parts to rule out SQL injection via configuration.
        if not all(part.isidentifier() for part in table_name.split(".")):
            raise ValueError(f"Invalid table name: {table_name!r}")
        self.host = host
        self.port = port
        self.database = database
        self.username = username
        self.password = password
        self.table_name = table_name
        self._pool: Optional[asyncpg.Pool] = None

    async def _get_pool(self) -> asyncpg.Pool:
        """Create the connection pool (and storage table) on first call.

        NOTE(review): not guarded against concurrent first calls; two racing
        coroutines could each create a pool — confirm single-task usage.
        """
        if self._pool is None:
            pool = await asyncpg.create_pool(
                host=self.host,
                port=self.port,
                database=self.database,
                user=self.username,
                password=self.password,
            )
            # Publish the pool only after the table is known to exist, so a
            # failed CREATE TABLE does not leave a half-initialized backend.
            try:
                await self._ensure_table(pool)
            except Exception:
                await pool.close()
                raise
            self._pool = pool
        return self._pool

    async def _ensure_table(self, pool: asyncpg.Pool) -> None:
        """Create the key/value table if it does not already exist."""
        async with pool.acquire() as conn:
            await conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    key VARCHAR PRIMARY KEY,
                    data BYTEA NOT NULL,
                    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
                    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
                )
                """
            )

    async def save(self, key: str, data: bytes) -> None:
        """Insert or update *data* under *key* (upsert)."""
        pool = await self._get_pool()
        async with pool.acquire() as conn:
            await conn.execute(
                f"""
                INSERT INTO {self.table_name} (key, data, updated_at)
                VALUES ($1, $2, NOW())
                ON CONFLICT (key) DO UPDATE SET
                    data = EXCLUDED.data,
                    updated_at = NOW()
                """,
                key,
                data,
            )

    async def load(self, key: str) -> Optional[bytes]:
        """Return the bytes stored under *key*, or ``None`` if absent."""
        pool = await self._get_pool()
        async with pool.acquire() as conn:
            row = await conn.fetchrow(
                f"SELECT data FROM {self.table_name} WHERE key = $1", key
            )
            return row["data"] if row else None

    async def delete(self, key: str) -> None:
        """Delete the row for *key*; does nothing if it does not exist."""
        pool = await self._get_pool()
        async with pool.acquire() as conn:
            await conn.execute(f"DELETE FROM {self.table_name} WHERE key = $1", key)

    async def list_keys(self, prefix: str = "") -> List[str]:
        """Return all keys starting with *prefix*.

        NOTE(review): *prefix* is used in a LIKE pattern, so ``%``/``_`` in
        it act as wildcards — confirm callers never pass those characters.
        """
        pool = await self._get_pool()
        async with pool.acquire() as conn:
            rows = await conn.fetch(
                f"SELECT key FROM {self.table_name} WHERE key LIKE $1",
                f"{prefix}%",
            )
            return [row["key"] for row in rows]

    async def close(self) -> None:
        """Close the connection pool if it was created."""
        if self._pool:
            await self._pool.close()
            self._pool = None
219
+
220
+
221
class RedisEventStorage:
    """Redis-based event storage for EventStore.

    Events are JSON-encoded into one Redis list per key; the connection is
    created lazily on first use. The ``redis``-type annotations are strings
    on purpose — evaluating them eagerly would raise at import time when the
    optional redis client library is missing (module fallback: ``redis = None``).
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 6379,
        db: int = 0,
        password: Optional[str] = None,
        key_prefix: str = "events:",
    ):
        """Store connection settings; no connection is opened here.

        Raises:
            RuntimeError: if no async Redis client library is installed.
        """
        # Fail fast with a clear message instead of an obscure error the
        # first time the (missing) client module would be used.
        if redis is None:
            raise RuntimeError(
                "RedisEventStorage requires the 'redis' (redis.asyncio) or "
                "'aioredis' package to be installed"
            )
        self.host = host
        self.port = port
        self.db = db
        self.password = password
        self.key_prefix = key_prefix
        # Lazily created client (see class docstring for the string annotation).
        self._redis: Optional["redis.Redis"] = None

    async def _get_redis(self) -> "redis.Redis":
        """Create and cache the Redis client on first call."""
        if self._redis is None:
            self._redis = await redis.from_url(
                f"redis://{self.host}:{self.port}",
                db=self.db,
                password=self.password,
                decode_responses=False,  # payloads are handled as bytes
            )
        return self._redis

    def _make_key(self, key: str) -> str:
        """Return *key* namespaced with the configured prefix."""
        return f"{self.key_prefix}{key}"

    async def append(self, key: str, events: List[Dict[str, Any]]) -> None:
        """Append JSON-serializable *events* to the Redis list at *key*."""
        if not events:
            return  # LPUSH with zero values is an error in Redis
        client = await self._get_redis()
        serialized_events = [json.dumps(event) for event in events]
        await client.lpush(self._make_key(key), *serialized_events)

    async def get(self, key: str) -> List[Dict[str, Any]]:
        """Return all events stored at *key*, oldest first."""
        client = await self._get_redis()
        data = await client.lrange(self._make_key(key), 0, -1)
        # LPUSH stores newest-first; reverse to restore append order.
        return [json.loads(item) for item in reversed(data)]

    async def close(self) -> None:
        """Close the Redis connection if it was opened."""
        if self._redis:
            await self._redis.close()
            self._redis = None
271
+
272
+
273
class PostgreSQLEventStorage:
    """PostgreSQL-based event storage for EventStore.

    Each event is one row keyed by ``request_id`` and ordered by
    ``sequence_number``; the table and its index are created on first use.
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 5432,
        database: str = "kailash",
        username: str = "postgres",
        password: str = "",
        table_name: str = "events",
    ):
        """Store connection settings; no connection is opened here.

        Raises:
            ValueError: if *table_name* is not a plain (optionally
                schema-qualified) SQL identifier.
        """
        # table_name is interpolated into SQL text below, so restrict it to
        # identifier parts to rule out SQL injection via configuration.
        if not all(part.isidentifier() for part in table_name.split(".")):
            raise ValueError(f"Invalid table name: {table_name!r}")
        self.host = host
        self.port = port
        self.database = database
        self.username = username
        self.password = password
        self.table_name = table_name
        self._pool: Optional[asyncpg.Pool] = None

    async def _get_pool(self) -> asyncpg.Pool:
        """Create the connection pool (and events table) on first call."""
        if self._pool is None:
            pool = await asyncpg.create_pool(
                host=self.host,
                port=self.port,
                database=self.database,
                user=self.username,
                password=self.password,
            )
            # Publish the pool only after the table is known to exist, so a
            # failed CREATE TABLE does not leave a half-initialized backend.
            try:
                await self._ensure_table(pool)
            except Exception:
                await pool.close()
                raise
            self._pool = pool
        return self._pool

    async def _ensure_table(self, pool: asyncpg.Pool) -> None:
        """Create the events table and its request_id index if missing.

        NOTE(review): the index name embeds table_name, so a schema-qualified
        table name would produce an invalid index identifier — confirm only
        unqualified names are used in practice.
        """
        async with pool.acquire() as conn:
            await conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    id SERIAL PRIMARY KEY,
                    request_id VARCHAR NOT NULL,
                    event_id VARCHAR NOT NULL,
                    event_type VARCHAR NOT NULL,
                    sequence_number INTEGER NOT NULL,
                    timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
                    data JSONB,
                    metadata JSONB,
                    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
                )
                """
            )

            await conn.execute(
                f"""
                CREATE INDEX IF NOT EXISTS idx_{self.table_name}_request_id
                ON {self.table_name} (request_id)
                """
            )

    async def append(self, key: str, events: List[Dict[str, Any]]) -> None:
        """Append *events* as rows.

        Each event dict must provide ``request_id``, ``event_id``,
        ``event_type``, ``sequence_number``, ``timestamp`` (ISO string),
        ``data`` and ``metadata``.
        """
        if not events:
            return
        pool = await self._get_pool()
        # Build the parameter tuples up front so serialization errors surface
        # before any row is written.
        rows = [
            (
                event["request_id"],
                event["event_id"],
                event["event_type"],
                event["sequence_number"],
                datetime.fromisoformat(event["timestamp"]),
                json.dumps(event["data"]),
                json.dumps(event["metadata"]),
            )
            for event in events
        ]
        async with pool.acquire() as conn:
            # executemany prepares the statement once and executes all rows
            # atomically, instead of one round trip per event.
            await conn.executemany(
                f"""
                INSERT INTO {self.table_name}
                (request_id, event_id, event_type, sequence_number, timestamp, data, metadata)
                VALUES ($1, $2, $3, $4, $5, $6, $7)
                """,
                rows,
            )

    async def get(self, key: str) -> List[Dict[str, Any]]:
        """Return all events for the request encoded in *key*, in sequence order."""
        # Extract request_id from key (format: "events:request_id")
        request_id = key.split(":", 1)[1] if ":" in key else key

        pool = await self._get_pool()
        async with pool.acquire() as conn:
            rows = await conn.fetch(
                f"""
                SELECT event_id, event_type, request_id, sequence_number,
                       timestamp, data, metadata
                FROM {self.table_name}
                WHERE request_id = $1
                ORDER BY sequence_number
                """,
                request_id,
            )

        # Re-inflate each row into the dict shape accepted by append().
        return [
            {
                "event_id": row["event_id"],
                "event_type": row["event_type"],
                "request_id": row["request_id"],
                "sequence_number": row["sequence_number"],
                "timestamp": row["timestamp"].isoformat(),
                "data": json.loads(row["data"]) if row["data"] else {},
                "metadata": json.loads(row["metadata"]) if row["metadata"] else {},
            }
            for row in rows
        ]

    async def close(self) -> None:
        """Close the connection pool if it was created."""
        if self._pool:
            await self._pool.close()
            self._pool = None
@@ -199,9 +199,12 @@ class MiddlewareMCPServer:
199
199
  # Tool Registration Workflow
200
200
  self.tool_register_workflow = WorkflowBuilder()
201
201
 
202
- validator = PythonCodeNode(
203
- name="validate_tool",
204
- code="""
202
+ # Use proper WorkflowBuilder syntax with string class names
203
+ self.tool_register_workflow.add_node(
204
+ "PythonCodeNode",
205
+ "validate_tool",
206
+ {
207
+ "code": """
205
208
  # Validate tool registration using Kailash patterns
206
209
  tool_data = input_data.get('tool_data', {})
207
210
 
@@ -220,12 +223,15 @@ else:
220
223
  'tool_data': tool_data,
221
224
  'validation_passed': True
222
225
  }
223
- """,
226
+ """
227
+ },
224
228
  )
225
229
 
226
- register_handler = PythonCodeNode(
227
- name="register_tool",
228
- code="""
230
+ self.tool_register_workflow.add_node(
231
+ "PythonCodeNode",
232
+ "register_tool",
233
+ {
234
+ "code": """
229
235
  # Register tool using Kailash patterns
230
236
  validation_result = input_data.get('validation_result', {})
231
237
 
@@ -243,11 +249,10 @@ else:
243
249
  'tool_registered': True,
244
250
  'registration_time': datetime.now().isoformat()
245
251
  }
246
- """,
252
+ """
253
+ },
247
254
  )
248
255
 
249
- self.tool_register_workflow.add_node(validator)
250
- self.tool_register_workflow.add_node(register_handler)
251
256
  self.tool_register_workflow.add_connection(
252
257
  "validate_tool", "result", "register_tool", "validation_result"
253
258
  )
@@ -255,9 +260,11 @@ else:
255
260
  # Tool Execution Workflow
256
261
  self.tool_execute_workflow = WorkflowBuilder()
257
262
 
258
- executor = PythonCodeNode(
259
- name="execute_tool",
260
- code="""
263
+ self.tool_execute_workflow.add_node(
264
+ "PythonCodeNode",
265
+ "execute_tool",
266
+ {
267
+ "code": """
261
268
  # Execute MCP tool using Kailash patterns
262
269
  tool_name = input_data.get('tool_name')
263
270
  tool_args = input_data.get('arguments', {})
@@ -272,11 +279,10 @@ execution_result = {
272
279
  }
273
280
 
274
281
  result = {'execution_result': execution_result}
275
- """,
282
+ """
283
+ },
276
284
  )
277
285
 
278
- self.tool_execute_workflow.add_node(executor)
279
-
280
286
  async def register_tool(
281
287
  self,
282
288
  name: str,
@@ -0,0 +1,21 @@
1
+ """Kailash Nexus - Multi-channel workflow orchestration framework."""
2
+
3
+ from .factory import (
4
+ create_api_nexus,
5
+ create_cli_nexus,
6
+ create_development_nexus,
7
+ create_mcp_nexus,
8
+ create_nexus,
9
+ create_production_nexus,
10
+ )
11
+ from .gateway import NexusGateway
12
+
13
+ __all__ = [
14
+ "NexusGateway",
15
+ "create_nexus",
16
+ "create_api_nexus",
17
+ "create_cli_nexus",
18
+ "create_mcp_nexus",
19
+ "create_development_nexus",
20
+ "create_production_nexus",
21
+ ]
@@ -0,0 +1,5 @@
1
+ """Nexus CLI module for command-line workflow interaction."""
2
+
3
+ from .main import main
4
+
5
+ __all__ = ["main"]
@@ -0,0 +1,6 @@
1
+ """Entry point for Nexus CLI when run as a module."""
2
+
3
+ from .main import main
4
+
5
+ if __name__ == "__main__":
6
+ main()
@@ -0,0 +1,176 @@
1
+ """Main CLI interface for Nexus workflow orchestration.
2
+
3
+ This module provides command-line access to Nexus workflows running on a server.
4
+ It connects to a running Nexus instance and allows listing and executing workflows.
5
+ """
6
+
7
+ import argparse
8
+ import json
9
+ import sys
10
+ from typing import Any, Dict, Optional
11
+
12
+ import requests
13
+
14
+
15
class NexusCLI:
    """Command-line interface for Nexus workflows.

    Talks to a running Nexus server over HTTP: lists workflows and executes
    them, printing results to stdout. All error paths print to stderr and
    exit with status 1 (CLI semantics).
    """

    def __init__(self, base_url: str = "http://localhost:8000"):
        """Initialize CLI with Nexus server URL.

        Args:
            base_url: Base URL of the Nexus server (trailing slash stripped)
        """
        self.base_url = base_url.rstrip("/")

    def list_workflows(self) -> None:
        """List all available workflows on stdout; exit(1) on errors."""
        try:
            response = requests.get(f"{self.base_url}/workflows", timeout=5)
            response.raise_for_status()

            workflows = response.json()

            if not workflows:
                print("No workflows available.")
                return

            print("Available workflows:")
            for workflow_name in sorted(workflows.keys()):
                print(f" - {workflow_name}")

        except requests.RequestException as e:
            print(f"Error connecting to Nexus server: {e}", file=sys.stderr)
            sys.exit(1)
        except json.JSONDecodeError as e:
            print(f"Error parsing server response: {e}", file=sys.stderr)
            sys.exit(1)

    def run_workflow(
        self, workflow_name: str, parameters: Optional[Dict[str, Any]] = None
    ) -> None:
        """Execute a workflow with optional parameters.

        Args:
            workflow_name: Name of the workflow to execute
            parameters: Optional parameters for the workflow
        """
        try:
            payload = {"parameters": parameters or {}}

            response = requests.post(
                f"{self.base_url}/workflows/{workflow_name}", json=payload, timeout=30
            )
            response.raise_for_status()

            result = response.json()

            # Handle enterprise workflow execution format
            if isinstance(result, dict) and "outputs" in result:
                for node_name, node_result in result["outputs"].items():
                    # Guard against non-dict node payloads so one malformed
                    # node does not crash the whole command; dump anything
                    # unexpected as JSON instead of silently skipping it.
                    if isinstance(node_result, dict) and isinstance(
                        node_result.get("result"), dict
                    ):
                        for key, value in node_result["result"].items():
                            print(f"{key}: {value}")
                    else:
                        print(json.dumps({node_name: node_result}, indent=2))
            else:
                # Handle direct result format
                print(json.dumps(result, indent=2))

        except requests.RequestException as e:
            print(f"Error executing workflow: {e}", file=sys.stderr)
            sys.exit(1)
        except json.JSONDecodeError as e:
            print(f"Error parsing execution result: {e}", file=sys.stderr)
            sys.exit(1)

    def parse_parameters(self, param_strings: List[str]) -> Dict[str, Any]:
        """Parse parameter strings in key=value format.

        Args:
            param_strings: List of parameter strings in "key=value" format

        Returns:
            Dictionary of parsed parameters; values that parse as JSON are
            decoded (numbers, booleans, objects), everything else stays a
            string. Exits with status 1 on malformed input.
        """
        parameters = {}

        for param_str in param_strings:
            if "=" not in param_str:
                print(
                    f"Invalid parameter format: {param_str}. Use key=value format.",
                    file=sys.stderr,
                )
                sys.exit(1)

            key, value = param_str.split("=", 1)

            # Try to parse as JSON for complex values, otherwise use as string
            try:
                parameters[key] = json.loads(value)
            except json.JSONDecodeError:
                parameters[key] = value

        return parameters
116
+
117
+
118
def main():
    """Main CLI entry point.

    Parses command-line arguments and dispatches to a NexusCLI client.
    Exits with status 1 when no command is given.
    """
    parser = argparse.ArgumentParser(
        description="Nexus CLI - Command-line interface for workflow orchestration",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python -m kailash.nexus.cli list
  python -m kailash.nexus.cli run my-workflow
  python -m kailash.nexus.cli run my-workflow --param name=value --param count=5

  # Connect to different server:
  python -m kailash.nexus.cli --url http://localhost:8001 list
        """,
    )

    parser.add_argument(
        "--url",
        default="http://localhost:8000",
        help="Base URL of the Nexus server (default: http://localhost:8000)",
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # List command (takes no extra arguments, so the parser object is unused).
    subparsers.add_parser("list", help="List available workflows")

    # Run command
    run_parser = subparsers.add_parser("run", help="Execute a workflow")
    run_parser.add_argument("workflow", help="Name of the workflow to execute")
    run_parser.add_argument(
        "--param",
        action="append",
        default=[],
        help="Workflow parameters in key=value format (can be used multiple times)",
    )

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        sys.exit(1)

    # Initialize CLI client
    cli = NexusCLI(base_url=args.url)

    # Execute command
    if args.command == "list":
        cli.list_workflows()
    elif args.command == "run":
        parameters = cli.parse_parameters(args.param)
        cli.run_workflow(args.workflow, parameters)
    else:
        # Unreachable with the subparsers defined above; kept as a guard.
        parser.print_help()
        sys.exit(1)
173
+
174
+
175
+ if __name__ == "__main__":
176
+ main()