kailash 0.7.0__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
@@ -0,0 +1,393 @@
+ """Storage backend implementations for middleware components."""
+
+ import asyncio
+ import json
+ from abc import ABC, abstractmethod
+ from datetime import UTC, datetime
+ from typing import Any, Dict, List, Optional
+
+ try:
+     import redis.asyncio as redis
+ except ImportError:
+     try:
+         import aioredis as redis
+     except ImportError:
+         redis = None
+ import asyncpg
+
+
+ class StorageBackend(ABC):
+     """Abstract base class for storage backends."""
+
+     @abstractmethod
+     async def save(self, key: str, data: bytes) -> None:
+         """Save data to storage."""
+         pass
+
+     @abstractmethod
+     async def load(self, key: str) -> Optional[bytes]:
+         """Load data from storage."""
+         pass
+
+     @abstractmethod
+     async def delete(self, key: str) -> None:
+         """Delete data from storage."""
+         pass
+
+     @abstractmethod
+     async def list_keys(self, prefix: str = "") -> List[str]:
+         """List keys with optional prefix."""
+         pass
+
+     @abstractmethod
+     async def close(self) -> None:
+         """Close storage connection."""
+         pass
+
+
+ class RedisStorage(StorageBackend):
+     """Redis-based storage backend."""
+
+     def __init__(
+         self,
+         host: str = "localhost",
+         port: int = 6379,
+         db: int = 0,
+         password: Optional[str] = None,
+         key_prefix: str = "kailash:",
+     ):
+         self.host = host
+         self.port = port
+         self.db = db
+         self.password = password
+         self.key_prefix = key_prefix
+         self._redis: Optional[redis.Redis] = None
+
+     async def _get_redis(self) -> redis.Redis:
+         """Get Redis connection."""
+         if self._redis is None:
+             self._redis = await redis.from_url(
+                 f"redis://{self.host}:{self.port}",
+                 db=self.db,
+                 password=self.password,
+                 decode_responses=False,
+             )
+         return self._redis
+
+     def _make_key(self, key: str) -> str:
+         """Create prefixed key."""
+         return f"{self.key_prefix}{key}"
+
+     async def save(self, key: str, data: bytes) -> None:
+         """Save data to Redis."""
+         redis = await self._get_redis()
+         await redis.set(self._make_key(key), data)
+
+     async def load(self, key: str) -> Optional[bytes]:
+         """Load data from Redis."""
+         redis = await self._get_redis()
+         return await redis.get(self._make_key(key))
+
+     async def delete(self, key: str) -> None:
+         """Delete data from Redis."""
+         redis = await self._get_redis()
+         await redis.delete(self._make_key(key))
+
+     async def list_keys(self, prefix: str = "") -> List[str]:
+         """List keys with prefix."""
+         redis = await self._get_redis()
+         pattern = self._make_key(f"{prefix}*")
+         keys = await redis.keys(pattern)
+
+         # Remove prefix from keys
+         prefix_len = len(self.key_prefix)
+         return [key.decode()[prefix_len:] for key in keys]
+
+     async def append(self, key: str, data: List[Dict[str, Any]]) -> None:
+         """Append data to a Redis list."""
+         redis = await self._get_redis()
+         serialized_data = [json.dumps(item) for item in data]
+         await redis.lpush(self._make_key(key), *serialized_data)
+
+     async def get(self, key: str) -> List[Dict[str, Any]]:
+         """Get data from Redis list."""
+         redis = await self._get_redis()
+         data = await redis.lrange(self._make_key(key), 0, -1)
+         return [json.loads(item) for item in reversed(data)]
+
+     async def close(self) -> None:
+         """Close Redis connection."""
+         if self._redis:
+             await self._redis.close()
+             self._redis = None
+
+
+ class PostgreSQLStorage(StorageBackend):
+     """PostgreSQL-based storage backend."""
+
+     def __init__(
+         self,
+         host: str = "localhost",
+         port: int = 5432,
+         database: str = "kailash",
+         username: str = "postgres",
+         password: str = "",
+         table_name: str = "storage",
+     ):
+         self.host = host
+         self.port = port
+         self.database = database
+         self.username = username
+         self.password = password
+         self.table_name = table_name
+         self._pool: Optional[asyncpg.Pool] = None
+
+     async def _get_pool(self) -> asyncpg.Pool:
+         """Get PostgreSQL connection pool."""
+         if self._pool is None:
+             self._pool = await asyncpg.create_pool(
+                 host=self.host,
+                 port=self.port,
+                 database=self.database,
+                 user=self.username,
+                 password=self.password,
+             )
+             await self._ensure_table()
+         return self._pool
+
+     async def _ensure_table(self) -> None:
+         """Ensure storage table exists."""
+         pool = self._pool
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"""
+                 CREATE TABLE IF NOT EXISTS {self.table_name} (
+                     key VARCHAR PRIMARY KEY,
+                     data BYTEA NOT NULL,
+                     created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
+                     updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+                 )
+                 """
+             )
+
+     async def save(self, key: str, data: bytes) -> None:
+         """Save data to PostgreSQL."""
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"""
+                 INSERT INTO {self.table_name} (key, data, updated_at)
+                 VALUES ($1, $2, NOW())
+                 ON CONFLICT (key) DO UPDATE SET
+                     data = EXCLUDED.data,
+                     updated_at = NOW()
+                 """,
+                 key,
+                 data,
+             )
+
+     async def load(self, key: str) -> Optional[bytes]:
+         """Load data from PostgreSQL."""
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             row = await conn.fetchrow(
+                 f"SELECT data FROM {self.table_name} WHERE key = $1", key
+             )
+             return row["data"] if row else None
+
+     async def delete(self, key: str) -> None:
+         """Delete data from PostgreSQL."""
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             await conn.execute(f"DELETE FROM {self.table_name} WHERE key = $1", key)
+
+     async def list_keys(self, prefix: str = "") -> List[str]:
+         """List keys with prefix."""
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(
+                 f"SELECT key FROM {self.table_name} WHERE key LIKE $1",
+                 f"{prefix}%",
+             )
+             return [row["key"] for row in rows]
+
+     async def close(self) -> None:
+         """Close PostgreSQL connection pool."""
+         if self._pool:
+             await self._pool.close()
+             self._pool = None
+
+
+ class RedisEventStorage:
+     """Redis-based event storage for EventStore."""
+
+     def __init__(
+         self,
+         host: str = "localhost",
+         port: int = 6379,
+         db: int = 0,
+         password: Optional[str] = None,
+         key_prefix: str = "events:",
+     ):
+         self.host = host
+         self.port = port
+         self.db = db
+         self.password = password
+         self.key_prefix = key_prefix
+         self._redis: Optional[redis.Redis] = None
+
+     async def _get_redis(self) -> redis.Redis:
+         """Get Redis connection."""
+         if self._redis is None:
+             self._redis = await redis.from_url(
+                 f"redis://{self.host}:{self.port}",
+                 db=self.db,
+                 password=self.password,
+                 decode_responses=False,
+             )
+         return self._redis
+
+     def _make_key(self, key: str) -> str:
+         """Create prefixed key."""
+         return f"{self.key_prefix}{key}"
+
+     async def append(self, key: str, events: List[Dict[str, Any]]) -> None:
+         """Append events to Redis list."""
+         redis = await self._get_redis()
+         serialized_events = [json.dumps(event) for event in events]
+         await redis.lpush(self._make_key(key), *serialized_events)
+
+     async def get(self, key: str) -> List[Dict[str, Any]]:
+         """Get events from Redis list."""
+         redis = await self._get_redis()
+         data = await redis.lrange(self._make_key(key), 0, -1)
+         return [json.loads(item) for item in reversed(data)]
+
+     async def close(self) -> None:
+         """Close Redis connection."""
+         if self._redis:
+             await self._redis.close()
+             self._redis = None
+
+
+ class PostgreSQLEventStorage:
+     """PostgreSQL-based event storage for EventStore."""
+
+     def __init__(
+         self,
+         host: str = "localhost",
+         port: int = 5432,
+         database: str = "kailash",
+         username: str = "postgres",
+         password: str = "",
+         table_name: str = "events",
+     ):
+         self.host = host
+         self.port = port
+         self.database = database
+         self.username = username
+         self.password = password
+         self.table_name = table_name
+         self._pool: Optional[asyncpg.Pool] = None
+
+     async def _get_pool(self) -> asyncpg.Pool:
+         """Get PostgreSQL connection pool."""
+         if self._pool is None:
+             self._pool = await asyncpg.create_pool(
+                 host=self.host,
+                 port=self.port,
+                 database=self.database,
+                 user=self.username,
+                 password=self.password,
+             )
+             await self._ensure_table()
+         return self._pool
+
+     async def _ensure_table(self) -> None:
+         """Ensure events table exists."""
+         pool = self._pool
+         async with pool.acquire() as conn:
+             await conn.execute(
+                 f"""
+                 CREATE TABLE IF NOT EXISTS {self.table_name} (
+                     id SERIAL PRIMARY KEY,
+                     request_id VARCHAR NOT NULL,
+                     event_id VARCHAR NOT NULL,
+                     event_type VARCHAR NOT NULL,
+                     sequence_number INTEGER NOT NULL,
+                     timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
+                     data JSONB,
+                     metadata JSONB,
+                     created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
+                 )
+                 """
+             )
+
+             await conn.execute(
+                 f"""
+                 CREATE INDEX IF NOT EXISTS idx_{self.table_name}_request_id
+                 ON {self.table_name} (request_id)
+                 """
+             )
+
+     async def append(self, key: str, events: List[Dict[str, Any]]) -> None:
+         """Append events to PostgreSQL."""
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             for event in events:
+                 await conn.execute(
+                     f"""
+                     INSERT INTO {self.table_name}
+                     (request_id, event_id, event_type, sequence_number, timestamp, data, metadata)
+                     VALUES ($1, $2, $3, $4, $5, $6, $7)
+                     """,
+                     event["request_id"],
+                     event["event_id"],
+                     event["event_type"],
+                     event["sequence_number"],
+                     datetime.fromisoformat(event["timestamp"]),
+                     json.dumps(event["data"]),
+                     json.dumps(event["metadata"]),
+                 )
+
+     async def get(self, key: str) -> List[Dict[str, Any]]:
+         """Get events from PostgreSQL."""
+         # Extract request_id from key (format: "events:request_id")
+         request_id = key.split(":", 1)[1] if ":" in key else key
+
+         pool = await self._get_pool()
+         async with pool.acquire() as conn:
+             rows = await conn.fetch(
+                 f"""
+                 SELECT event_id, event_type, request_id, sequence_number,
+                        timestamp, data, metadata
+                 FROM {self.table_name}
+                 WHERE request_id = $1
+                 ORDER BY sequence_number
+                 """,
+                 request_id,
+             )
+
+             events = []
+             for row in rows:
+                 events.append(
+                     {
+                         "event_id": row["event_id"],
+                         "event_type": row["event_type"],
+                         "request_id": row["request_id"],
+                         "sequence_number": row["sequence_number"],
+                         "timestamp": row["timestamp"].isoformat(),
+                         "data": json.loads(row["data"]) if row["data"] else {},
+                         "metadata": (
+                             json.loads(row["metadata"]) if row["metadata"] else {}
+                         ),
+                     }
+                 )
+
+             return events
+
+     async def close(self) -> None:
+         """Close PostgreSQL connection pool."""
+         if self._pool:
+             await self._pool.close()
+             self._pool = None
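For orientation, a minimal usage sketch of the key/value backends added in this file. The import path kailash.middleware.storage is an assumption (the diff does not show where this module lives), and the example presumes a Redis server on localhost:6379 and a reachable PostgreSQL database; the keys, payloads, and credentials are placeholders.

    # Minimal sketch, not part of the package; module path below is assumed.
    import asyncio

    from kailash.middleware.storage import PostgreSQLStorage, RedisStorage  # assumed path

    async def demo() -> None:
        # Key/value round trip against a local Redis server.
        store = RedisStorage(host="localhost", port=6379, key_prefix="kailash:")
        await store.save("session:abc", b'{"user": "alice"}')
        print(await store.load("session:abc"))      # expected: b'{"user": "alice"}'
        print(await store.list_keys("session:"))    # expected (if no other keys): ['session:abc']
        await store.delete("session:abc")
        await store.close()

        # Same interface against PostgreSQL; the table is created lazily on first use.
        pg = PostgreSQLStorage(database="kailash", username="postgres", password="postgres")
        await pg.save("config:flags", b'{"beta": true}')
        print(await pg.load("config:flags"))
        await pg.close()

    asyncio.run(demo())

The two event storage classes follow the same pattern, but store lists of JSON-serialized event dicts per key; the PostgreSQL variant expects keys of the form "events:request_id".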
@@ -0,0 +1,5 @@
+ """Nexus CLI module for command-line workflow interaction."""
+
+ from .main import main
+
+ __all__ = ["main"]
@@ -0,0 +1,6 @@
+ """Entry point for Nexus CLI when run as a module."""
+
+ from .main import main
+
+ if __name__ == "__main__":
+     main()
@@ -0,0 +1,176 @@
+ """Main CLI interface for Nexus workflow orchestration.
+
+ This module provides command-line access to Nexus workflows running on a server.
+ It connects to a running Nexus instance and allows listing and executing workflows.
+ """
+
+ import argparse
+ import json
+ import sys
+ from typing import Any, Dict, Optional
+
+ import requests
+
+
+ class NexusCLI:
+     """Command-line interface for Nexus workflows."""
+
+     def __init__(self, base_url: str = "http://localhost:8000"):
+         """Initialize CLI with Nexus server URL.
+
+         Args:
+             base_url: Base URL of the Nexus server
+         """
+         self.base_url = base_url.rstrip("/")
+
+     def list_workflows(self) -> None:
+         """List all available workflows."""
+         try:
+             response = requests.get(f"{self.base_url}/workflows", timeout=5)
+             response.raise_for_status()
+
+             workflows = response.json()
+
+             if not workflows:
+                 print("No workflows available.")
+                 return
+
+             print("Available workflows:")
+             for workflow_name in sorted(workflows.keys()):
+                 print(f" - {workflow_name}")
+
+         except requests.RequestException as e:
+             print(f"Error connecting to Nexus server: {e}", file=sys.stderr)
+             sys.exit(1)
+         except json.JSONDecodeError as e:
+             print(f"Error parsing server response: {e}", file=sys.stderr)
+             sys.exit(1)
+
+     def run_workflow(
+         self, workflow_name: str, parameters: Optional[Dict[str, Any]] = None
+     ) -> None:
+         """Execute a workflow with optional parameters.
+
+         Args:
+             workflow_name: Name of the workflow to execute
+             parameters: Optional parameters for the workflow
+         """
+         try:
+             payload = {"parameters": parameters or {}}
+
+             response = requests.post(
+                 f"{self.base_url}/workflows/{workflow_name}", json=payload, timeout=30
+             )
+             response.raise_for_status()
+
+             result = response.json()
+
+             # Handle enterprise workflow execution format
+             if "outputs" in result:
+                 # Extract results from each node
+                 for node_name, node_result in result["outputs"].items():
+                     if "result" in node_result:
+                         node_output = node_result["result"]
+                         # Print meaningful output
+                         for key, value in node_output.items():
+                             print(f"{key}: {value}")
+             else:
+                 # Handle direct result format
+                 print(json.dumps(result, indent=2))
+
+         except requests.RequestException as e:
+             print(f"Error executing workflow: {e}", file=sys.stderr)
+             sys.exit(1)
+         except json.JSONDecodeError as e:
+             print(f"Error parsing execution result: {e}", file=sys.stderr)
+             sys.exit(1)
+
+     def parse_parameters(self, param_strings: list) -> Dict[str, Any]:
+         """Parse parameter strings in key=value format.
+
+         Args:
+             param_strings: List of parameter strings in "key=value" format
+
+         Returns:
+             Dictionary of parsed parameters
+         """
+         parameters = {}
+
+         for param_str in param_strings:
+             if "=" not in param_str:
+                 print(
+                     f"Invalid parameter format: {param_str}. Use key=value format.",
+                     file=sys.stderr,
+                 )
+                 sys.exit(1)
+
+             key, value = param_str.split("=", 1)
+
+             # Try to parse as JSON for complex values, otherwise use as string
+             try:
+                 parameters[key] = json.loads(value)
+             except json.JSONDecodeError:
+                 parameters[key] = value
+
+         return parameters
+
+
+ def main():
+     """Main CLI entry point."""
+     parser = argparse.ArgumentParser(
+         description="Nexus CLI - Command-line interface for workflow orchestration",
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Examples:
+   python -m kailash.nexus.cli list
+   python -m kailash.nexus.cli run my-workflow
+   python -m kailash.nexus.cli run my-workflow --param name=value --param count=5
+
+   # Connect to different server:
+   python -m kailash.nexus.cli --url http://localhost:8001 list
+         """,
+     )
+
+     parser.add_argument(
+         "--url",
+         default="http://localhost:8000",
+         help="Base URL of the Nexus server (default: http://localhost:8000)",
+     )
+
+     subparsers = parser.add_subparsers(dest="command", help="Available commands")
+
+     # List command
+     list_parser = subparsers.add_parser("list", help="List available workflows")
+
+     # Run command
+     run_parser = subparsers.add_parser("run", help="Execute a workflow")
+     run_parser.add_argument("workflow", help="Name of the workflow to execute")
+     run_parser.add_argument(
+         "--param",
+         action="append",
+         default=[],
+         help="Workflow parameters in key=value format (can be used multiple times)",
+     )
+
+     args = parser.parse_args()
+
+     if not args.command:
+         parser.print_help()
+         sys.exit(1)
+
+     # Initialize CLI client
+     cli = NexusCLI(base_url=args.url)
+
+     # Execute command
+     if args.command == "list":
+         cli.list_workflows()
+     elif args.command == "run":
+         parameters = cli.parse_parameters(args.param)
+         cli.run_workflow(args.workflow, parameters)
+     else:
+         parser.print_help()
+         sys.exit(1)
+
+
+ if __name__ == "__main__":
+     main()
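The NexusCLI class above can also be exercised programmatically; a rough sketch follows. It assumes a Nexus server is listening on http://localhost:8000 and uses "my-workflow" and the parameter values purely as placeholders. Note that parse_parameters JSON-decodes values where it can, so count=5 becomes an integer while name=alice stays a string.

    # Sketch only; workflow name and parameter values are placeholders.
    from kailash.nexus.cli.main import NexusCLI

    cli = NexusCLI(base_url="http://localhost:8000")

    # Equivalent to: python -m kailash.nexus.cli run my-workflow --param count=5 --param name=alice
    params = cli.parse_parameters(["count=5", "name=alice"])
    print(params)  # {'count': 5, 'name': 'alice'}

    cli.list_workflows()                     # prints the names returned by GET /workflows
    cli.run_workflow("my-workflow", params)  # POSTs to /workflows/my-workflow and prints the result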
kailash/nodes/__init__.py CHANGED
@@ -1,7 +1,11 @@
  """Node system for the Kailash SDK."""
 
- # Import all node modules to ensure registration
- from kailash.nodes import (
+ # Import all node modules to ensure registration - fixed circular import
+ from kailash.nodes.base import Node, NodeParameter, NodeRegistry, register_node
+ from kailash.nodes.base_cycle_aware import CycleAwareNode
+ from kailash.nodes.code import PythonCodeNode
+
+ from . import (
      ai,
      alerts,
      api,
@@ -19,9 +23,6 @@ from kailash.nodes import (
      transaction,
      transform,
  )
- from kailash.nodes.base import Node, NodeParameter, NodeRegistry, register_node
- from kailash.nodes.base_cycle_aware import CycleAwareNode
- from kailash.nodes.code import PythonCodeNode
 
  # Compatibility alias - AsyncNode is now just Node
  AsyncNode = Node
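The effect of this reordering is that the base classes are fully imported (and re-exported) before the node submodules that depend on them are loaded, which is what breaks the circular import. A small sanity-check sketch, assuming the package and its optional node dependencies install cleanly:

    # The re-exported names are importable directly from kailash.nodes,
    # and importing the package registers the node modules as a side effect.
    from kailash.nodes import AsyncNode, Node, NodeParameter, PythonCodeNode, register_node

    # The compatibility alias shown above still resolves to Node.
    assert AsyncNode is Node
    print("kailash.nodes imported; node submodules loaded after the base classes")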
kailash/nodes/base.py CHANGED
@@ -1427,11 +1427,35 @@ class NodeRegistry:
          - Factory methods: Dynamic node creation
          """
          if node_name not in cls._nodes:
-             available_nodes = ", ".join(sorted(cls._nodes.keys()))
-             raise NodeConfigurationError(
-                 f"Node '{node_name}' not found in registry. "
-                 f"Available nodes: {available_nodes}"
-             )
+             available_nodes = sorted(cls._nodes.keys())
+
+             # Provide more helpful error message based on registry state
+             if not available_nodes:
+                 # Registry is empty - likely a test isolation or import issue
+                 raise NodeConfigurationError(
+                     f"Node '{node_name}' not found in registry. "
+                     f"The node registry is empty. This usually means:\n"
+                     f" 1. Nodes haven't been imported yet (try: import kailash.nodes)\n"
+                     f" 2. Test isolation cleared the registry without re-importing\n"
+                     f" 3. The node module containing '{node_name}' wasn't imported\n"
+                     f"Common nodes: PythonCodeNode, CSVReaderNode, SQLDatabaseNode, HTTPRequestNode"
+                 )
+             else:
+                 # Registry has nodes but not the requested one
+                 nodes_list = ", ".join(available_nodes)
+
+                 # Try to suggest similar node names
+                 similar = [
+                     n
+                     for n in available_nodes
+                     if node_name.lower() in n.lower() or n.lower() in node_name.lower()
+                 ]
+                 suggestion = f"\nDid you mean: {', '.join(similar)}?" if similar else ""
+
+                 raise NodeConfigurationError(
+                     f"Node '{node_name}' not found in registry. "
+                     f"Available nodes: {nodes_list}{suggestion}"
+                 )
          return cls._nodes[node_name]
 
      @classmethod
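The suggestion logic added above is a plain case-insensitive substring match in both directions. A standalone sketch of what it produces, using a made-up registry listing:

    # Mirrors the similar-name matching in NodeRegistry above; node names here are illustrative.
    available_nodes = sorted(["CSVReaderNode", "HTTPRequestNode", "PythonCodeNode", "SQLDatabaseNode"])
    node_name = "CSVReader"  # the (missing) node a caller asked for

    similar = [
        n
        for n in available_nodes
        if node_name.lower() in n.lower() or n.lower() in node_name.lower()
    ]
    suggestion = f"\nDid you mean: {', '.join(similar)}?" if similar else ""
    print(f"Node '{node_name}' not found in registry.{suggestion}")
    # -> Node 'CSVReader' not found in registry.
    #    Did you mean: CSVReaderNode?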