qyro-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qyro/__init__.py +17 -0
- qyro/adapters/__init__.py +4 -0
- qyro/adapters/language_adapters/__init__.py +4 -0
- qyro/adapters/language_adapters/c/__init__.py +4 -0
- qyro/adapters/language_adapters/python/__init__.py +4 -0
- qyro/adapters/language_adapters/python/python_adapter.py +584 -0
- qyro/cli/__init__.py +8 -0
- qyro/cli/__main__.py +5 -0
- qyro/cli/cli.py +392 -0
- qyro/cli/interactive.py +297 -0
- qyro/common/__init__.py +37 -0
- qyro/common/animation.py +82 -0
- qyro/common/builder.py +434 -0
- qyro/common/compiler.py +895 -0
- qyro/common/config.py +93 -0
- qyro/common/constants.py +99 -0
- qyro/common/errors.py +176 -0
- qyro/common/frontend.py +74 -0
- qyro/common/health.py +358 -0
- qyro/common/kafka_manager.py +192 -0
- qyro/common/logging.py +149 -0
- qyro/common/memory.py +147 -0
- qyro/common/metrics.py +301 -0
- qyro/common/monitoring.py +468 -0
- qyro/common/parser.py +91 -0
- qyro/common/platform.py +609 -0
- qyro/common/redis_memory.py +1108 -0
- qyro/common/rpc.py +287 -0
- qyro/common/sandbox.py +191 -0
- qyro/common/schema_loader.py +33 -0
- qyro/common/secure_sandbox.py +490 -0
- qyro/common/toolchain_validator.py +617 -0
- qyro/common/type_generator.py +176 -0
- qyro/common/validation.py +401 -0
- qyro/common/validator.py +204 -0
- qyro/gateway/__init__.py +8 -0
- qyro/gateway/gateway.py +303 -0
- qyro/orchestrator/__init__.py +8 -0
- qyro/orchestrator/orchestrator.py +1223 -0
- qyro-2.0.0.dist-info/METADATA +244 -0
- qyro-2.0.0.dist-info/RECORD +45 -0
- qyro-2.0.0.dist-info/WHEEL +5 -0
- qyro-2.0.0.dist-info/entry_points.txt +2 -0
- qyro-2.0.0.dist-info/licenses/LICENSE +21 -0
- qyro-2.0.0.dist-info/top_level.txt +1 -0
qyro/common/redis_memory.py
@@ -0,0 +1,1108 @@
"""
Redis-based Memory System for Qyro

This module provides a Redis-backed shared memory implementation with pub/sub
for real-time communication between Qyro modules. It replaces the old file-based
memory system with a more scalable and performant solution.

Features:
- Redis pub/sub for real-time state change notifications
- Thread-safe operations with connection pooling
- Automatic reconnection for transient failures
- Module state management
- Event broadcasting system
"""

import json
import threading
import time
from typing import Any, Callable, Dict, List, Optional
from dataclasses import dataclass, asdict
from enum import Enum

try:
    import redis
    from redis import Redis, ConnectionPool
    from redis.exceptions import (
        ConnectionError as RedisConnectionError,
        TimeoutError as RedisTimeoutError,
        RedisError
    )
except ImportError:
    raise ImportError(
        "redis library is required. Install with: pip install redis>=5.0.0"
    )

from .errors import QyroError, MemoryError, ErrorCode, Result
from .logging import get_logger

logger = get_logger("qyro.redis_memory")


# Redis key and channel constants
class RedisKeys:
    """Redis key and channel name constants."""
    # State storage
    STATE = "qyro:state"
    STATE_CHANGED = "qyro:state:changed"

    # Broadcasting
    BROADCAST = "qyro:broadcast"

    # Events
    EVENTS = "qyro:events"

    # Module registry
    MODULE_REGISTRY = "qyro:modules:registry"
    MODULE_STATE_PREFIX = "qyro:modules:state:"

    # Statistics
    STATS = "qyro:stats"


class EventType(Enum):
    """Standard event types for Qyro system."""
    MODULE_REGISTERED = "module_registered"
    MODULE_UNREGISTERED = "module_unregistered"
    MODULE_STATE_CHANGED = "module_state_changed"
    STATE_CHANGED = "state_changed"
    BROADCAST = "broadcast"
    ERROR = "error"
    INFO = "info"


@dataclass
class QyroEvent:
    """Structured event for Redis pub/sub."""
    event_type: str
    data: Dict[str, Any]
    timestamp: float
    source: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert event to dictionary."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'QyroEvent':
        """Create event from dictionary."""
        return cls(**data)
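
# Illustrative sketch (not part of the packaged source): QyroEvent is what
# publish_event() serializes onto the qyro:events channel and what
# _handle_event() reconstructs on the subscriber side. A minimal round trip
# through JSON, mirroring those two methods:
def _example_event_round_trip() -> QyroEvent:
    event = QyroEvent(
        event_type=EventType.INFO.value,
        data={"message": "hello"},
        timestamp=time.time(),
        source="example",
    )
    wire = json.dumps(event.to_dict())            # what publish_event() sends
    return QyroEvent.from_dict(json.loads(wire))  # what a subscriber callback receives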


@dataclass
class ModuleMetadata:
    """Metadata for registered modules."""
    name: str
    version: str
    language: str
    pid: Optional[int] = None
    registered_at: Optional[float] = None
    metadata: Optional[Dict[str, Any]] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert metadata to dictionary."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'ModuleMetadata':
        """Create metadata from dictionary."""
        return cls(**data)
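
# Illustrative sketch (not part of the packaged source): register_module()
# below builds a ModuleMetadata like this, falling back to version "1.0.0" and
# language "unknown" when the caller omits them. "camera" and the raw dict are
# made-up example values.
def _example_module_metadata() -> ModuleMetadata:
    raw = {"language": "python", "pid": 4242}
    return ModuleMetadata(
        name="camera",
        version=raw.get("version", "1.0.0"),
        language=raw.get("language", "unknown"),
        pid=raw.get("pid"),
        registered_at=time.time(),
        metadata=raw.get("metadata", {}),
    )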


# Note: this class reuses the name RedisConnectionError, so from this point on
# it shadows the ConnectionError alias imported from redis.exceptions above;
# the except clauses and raises below therefore resolve to this Qyro error type.
class RedisConnectionError(QyroError):
    """Redis connection error."""
    def __init__(self, message: str = "Redis connection error"):
        super().__init__(ErrorCode.MEMORY_NOT_FOUND, message)


class RedisQyroMemory:
    """
    Redis-based shared memory with pub/sub for real-time communication.

    This class provides a thread-safe interface to Redis for storing and
    sharing state across Qyro modules. It uses Redis pub/sub for real-time
    notifications of state changes.

    Redis Keys:
        qyro:state - Main state storage (hash)
        qyro:state:changed - Channel for state change notifications
        qyro:broadcast - Channel for broadcasting messages
        qyro:events - Channel for general events
        qyro:modules:registry - Set of registered module names
        qyro:modules:state:{module_name} - State for specific modules

    Example:
        >>> memory = RedisQyroMemory(host='localhost', port=6379)
        >>> memory.write({'counter': 0, 'status': 'active'})
        >>> state = memory.read()
        >>> memory.subscribe_to_changes(lambda data: print(f"State changed: {data}"))
    """

    def __init__(
        self,
        host: str = 'localhost',
        port: int = 6379,
        db: int = 0,
        password: Optional[str] = None,
        socket_timeout: float = 5.0,
        socket_connect_timeout: float = 5.0,
        max_connections: int = 50,
        retry_on_timeout: bool = True,
        decode_responses: bool = True
    ):
        """
        Initialize Redis connection with connection pooling.
        """
        import os

        # Respect environment variables if arguments are defaults
        if host == 'localhost':
            host = os.environ.get('QYRO_REDIS_HOST', 'localhost')

        if port == 6379:
            port_str = os.environ.get('QYRO_REDIS_PORT')
            if port_str:
                try:
                    port = int(port_str)
                except ValueError:
                    pass

        self.host = host
        self.port = port

        self.db = db
        self.password = password

        # Thread safety
        self._lock = threading.RLock()
        self._write_lock = threading.Lock()

        # Subscription management
        self._pubsub_thread: Optional[threading.Thread] = None
        self._pubsub_running = threading.Event()
        self._state_callbacks: List[Callable[[Dict[str, Any]], None]] = []
        self._event_callbacks: List[Callable[[QyroEvent], None]] = []

        # Reconnection settings
        self._max_retries = 3
        self._retry_delay = 1.0
        self._connected = False

        # Initialize connection pools - separate for commands and pubsub
        try:
            self._command_pool = ConnectionPool(
                host=host,
                port=port,
                db=db,
                password=password,
                socket_timeout=socket_timeout,
                socket_connect_timeout=socket_connect_timeout,
                max_connections=max_connections,
                retry_on_timeout=retry_on_timeout,
                decode_responses=decode_responses,
                health_check_interval=30  # Health check every 30 seconds
            )

            # Create Redis clients using pools
            self._redis = Redis(connection_pool=self._command_pool)

            # Separate pool for pubsub connections
            self._pubsub_pool = ConnectionPool(
                host=host,
                port=port,
                db=db,
                password=password,
                socket_timeout=socket_timeout,
                socket_connect_timeout=socket_connect_timeout,
                max_connections=5,  # Smaller pool for pubsub
                retry_on_timeout=retry_on_timeout,
                decode_responses=decode_responses
            )

            # Test connection
            self._test_connection()
            self._connected = True

            logger.info(
                "redis_memory_initialized",
                host=host,
                port=port,
                db=db
            )

        except RedisConnectionError as e:
            logger.error("redis_connection_failed", error=str(e))
            raise RedisConnectionError(
                f"Failed to connect to Redis at {host}:{port}: {e}"
            )
        except Exception as e:
            logger.error("redis_init_failed", error=str(e))
            raise RedisConnectionError(
                f"Failed to initialize Redis memory: {e}"
            )

    def _test_connection(self) -> None:
        """
        Test Redis connection with retry logic.

        Raises:
            RedisConnectionError: If connection fails after retries
        """
        for attempt in range(self._max_retries):
            try:
                self._redis.ping()
                return
            except (RedisConnectionError, RedisTimeoutError) as e:
                if attempt < self._max_retries - 1:
                    logger.warning(
                        "redis_connection_retry",
                        attempt=attempt + 1,
                        max_retries=self._max_retries,
                        error=str(e)
                    )
                    time.sleep(self._retry_delay * (attempt + 1))
                else:
                    raise

    def _ensure_connection(self) -> None:
        """
        Ensure Redis connection is active, reconnect if necessary.

        This method is called before each operation to handle transient
        connection failures gracefully.
        """
        try:
            self._redis.ping()
        except (RedisConnectionError, RedisTimeoutError) as e:
            logger.warning("redis_connection_lost", error=str(e))
            try:
                self._test_connection()
                logger.info("redis_connection_restored")
            except Exception as reconnect_error:
                logger.error("redis_reconnection_failed", error=str(reconnect_error))
                raise RedisConnectionError(
                    f"Failed to reconnect to Redis: {reconnect_error}"
                )

    def read(self) -> Dict[str, Any]:
        """
        Read the current state from Redis.

        Returns:
            Dictionary containing the current state. Returns empty dict
            if no state exists.

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        with self._lock:
            self._ensure_connection()

            try:
                # Read state from hash
                state_data = self._redis.hgetall(RedisKeys.STATE)

                if not state_data:
                    logger.debug("redis_state_empty")
                    return {}

                # Parse JSON values
                state = {}
                for key, value in state_data.items():
                    try:
                        state[key] = json.loads(value)
                    except json.JSONDecodeError:
                        state[key] = value

                logger.debug("redis_state_read", keys=list(state.keys()))
                return state

            except RedisError as e:
                logger.error("redis_read_failed", error=str(e))
                raise RedisConnectionError(f"Failed to read state: {e}")

    def read_field(self, key: str) -> Any:
        """
        Read a specific field from the state without loading the entire state.

        Args:
            key: Field name to read

        Returns:
            Value of the field, or None if not found
        """
        with self._lock:
            self._ensure_connection()

            try:
                value = self._redis.hget(RedisKeys.STATE, key)
                if value is None:
                    return None

                try:
                    return json.loads(value)
                except json.JSONDecodeError:
                    return value

            except RedisError as e:
                logger.error("redis_read_field_failed", error=str(e), key=key)
                raise RedisConnectionError(f"Failed to read field {key}: {e}")

    def read_fields(self, keys: List[str]) -> Dict[str, Any]:
        """
        Read multiple specific fields from the state without loading the entire state.

        Args:
            keys: List of field names to read

        Returns:
            Dictionary containing the requested fields
        """
        with self._lock:
            self._ensure_connection()

            try:
                values = self._redis.hmget(RedisKeys.STATE, keys)
                result = {}

                for i, key in enumerate(keys):
                    value = values[i]
                    if value is not None:
                        try:
                            result[key] = json.loads(value)
                        except json.JSONDecodeError:
                            result[key] = value

                logger.debug("redis_fields_read", keys=keys)
                return result

            except RedisError as e:
                logger.error("redis_read_fields_failed", error=str(e), keys=keys)
                raise RedisConnectionError(f"Failed to read fields {keys}: {e}")

    def write(self, data: Dict[str, Any]) -> None:
        """
        Write state to Redis and publish change notification.

        This method writes the provided data to Redis and publishes a
        notification on the state change channel. All subscribers will
        receive the new state.

        Args:
            data: Dictionary containing state to write

        Raises:
            RedisConnectionError: If Redis connection fails
            ValueError: If data is not a dictionary
        """
        if not isinstance(data, dict):
            raise ValueError("Data must be a dictionary")

        with self._write_lock:
            self._ensure_connection()

            try:
                # Use pipeline for atomic operation
                with self._redis.pipeline() as pipe:
                    # Write new state as hash (Partial Update) - DO NOT DELETE FIRST
                    if data:
                        serialized_data = {
                            key: json.dumps(value) if not isinstance(value, str) else value
                            for key, value in data.items()
                        }
                        # Use HSET to update specific fields without affecting others
                        pipe.hset(RedisKeys.STATE, mapping=serialized_data)

                    # Publish state change notification
                    notification = json.dumps({
                        'timestamp': time.time(),
                        'keys': list(data.keys()) if data else []
                    })
                    pipe.publish(RedisKeys.STATE_CHANGED, notification)

                    # Execute pipeline
                    pipe.execute()

                logger.debug("redis_state_written", keys=list(data.keys()) if data else [])

            except RedisError as e:
                logger.error("redis_write_failed", error=str(e))
                raise RedisConnectionError(f"Failed to write state: {e}")

    def update_field(self, key: str, value: Any) -> None:
        """
        Update a single field in the state without affecting other fields.

        Args:
            key: Field name to update
            value: Value to set for the field

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        with self._write_lock:
            self._ensure_connection()

            try:
                # Serialize the value
                serialized_value = json.dumps(value) if not isinstance(value, str) else value

                # Update only the specific field
                self._redis.hset(RedisKeys.STATE, key, serialized_value)

                # Publish state change notification
                notification = json.dumps({
                    'timestamp': time.time(),
                    'keys': [key]
                })
                self._redis.publish(RedisKeys.STATE_CHANGED, notification)

                logger.debug("redis_field_updated", key=key)

            except RedisError as e:
                logger.error("redis_field_update_failed", error=str(e))
                raise RedisConnectionError(f"Failed to update field: {e}")

    def update_fields(self, data: Dict[str, Any]) -> None:
        """
        Update multiple fields in the state without affecting other fields.

        Args:
            data: Dictionary containing fields to update

        Raises:
            RedisConnectionError: If Redis connection fails
            ValueError: If data is not a dictionary
        """
        if not isinstance(data, dict):
            raise ValueError("Data must be a dictionary")

        with self._write_lock:
            self._ensure_connection()

            try:
                # Use pipeline for atomic operation
                with self._redis.pipeline() as pipe:
                    # Update specific fields without affecting others
                    if data:
                        serialized_data = {
                            key: json.dumps(value) if not isinstance(value, str) else value
                            for key, value in data.items()
                        }
                        pipe.hset(RedisKeys.STATE, mapping=serialized_data)

                    # Publish state change notification
                    notification = json.dumps({
                        'timestamp': time.time(),
                        'keys': list(data.keys()) if data else []
                    })
                    pipe.publish(RedisKeys.STATE_CHANGED, notification)

                    # Execute pipeline
                    pipe.execute()

                logger.debug("redis_fields_updated", keys=list(data.keys()) if data else [])

            except RedisError as e:
                logger.error("redis_fields_update_failed", error=str(e))
                raise RedisConnectionError(f"Failed to update fields: {e}")

    def subscribe_to_changes(self, callback: Callable[[Dict[str, Any]], None]) -> None:
        """
        Subscribe to state change notifications.

        This method starts a background thread that listens for state
        change notifications and invokes the provided callback when
        changes occur.

        Args:
            callback: Function to call when state changes. Receives the
                new state as a dictionary.

        Note:
            Multiple callbacks can be registered. Each will be called
            when a state change occurs.
        """
        with self._lock:
            if callback in self._state_callbacks:
                logger.warning("callback_already_registered")
                return

            self._state_callbacks.append(callback)

            # Start pub/sub thread if not already running
            if not self._pubsub_running.is_set():
                self._start_pubsub_thread()

            logger.info("state_change_subscribed", callbacks=len(self._state_callbacks))

    def _start_pubsub_thread(self) -> None:
        """Start the background pub/sub listener thread."""
        if self._pubsub_running.is_set():
            return

        self._pubsub_running.set()
        self._pubsub_thread = threading.Thread(
            target=self._pubsub_listener,
            daemon=True,
            name="RedisPubSubListener"
        )
        self._pubsub_thread.start()
        logger.info("pubsub_thread_started")

    def _pubsub_listener(self) -> None:
        """
        Background thread that listens for pub/sub messages.

        This thread runs in the background and processes messages from
        Redis pub/sub channels. It handles both state changes and
        general events.
        """
        # Create separate connection for pub/sub using dedicated pool
        pubsub_redis = Redis(connection_pool=self._pubsub_pool)
        pubsub = pubsub_redis.pubsub()

        # Subscribe to channels
        pubsub.subscribe(RedisKeys.STATE_CHANGED)
        pubsub.subscribe(RedisKeys.EVENTS)

        logger.info("pubsub_listener_started")

        try:
            while self._pubsub_running.is_set():
                try:
                    # Get message with timeout
                    message = pubsub.get_message(timeout=1.0)

                    if message is None:
                        continue

                    if message['type'] == 'message':
                        channel = message['channel']
                        data = message['data']

                        if channel == RedisKeys.STATE_CHANGED:
                            self._handle_state_change(data)
                        elif channel == RedisKeys.EVENTS:
                            self._handle_event(data)

                except (RedisConnectionError, RedisTimeoutError) as e:
                    logger.warning("pubsub_connection_error", error=str(e))
                    time.sleep(1.0)
                except Exception as e:
                    logger.error("pubsub_listener_error", error=str(e))
                    time.sleep(1.0)

        finally:
            pubsub.close()
            logger.info("pubsub_listener_stopped")

    def _handle_state_change(self, data: str) -> None:
        """
        Handle state change notification.

        Args:
            data: JSON-encoded notification data
        """
        try:
            notification = json.loads(data)
            state = self.read()

            # Call all registered callbacks
            for callback in self._state_callbacks:
                try:
                    callback(state)
                except Exception as e:
                    logger.error("state_callback_error", error=str(e))

            logger.debug("state_change_handled", notification=notification)

        except json.JSONDecodeError as e:
            logger.error("state_change_decode_error", error=str(e))

    def _handle_event(self, data: str) -> None:
        """
        Handle general event notification.

        Args:
            data: JSON-encoded event data
        """
        try:
            event_dict = json.loads(data)
            event = QyroEvent.from_dict(event_dict)

            # Call all registered event callbacks
            for callback in self._event_callbacks:
                try:
                    callback(event)
                except Exception as e:
                    logger.error("event_callback_error", error=str(e))

            logger.debug("event_handled", event_type=event.event_type)

        except json.JSONDecodeError as e:
            logger.error("event_decode_error", error=str(e))

    def publish_event(self, event_type: str, data: Dict[str, Any], source: Optional[str] = None) -> None:
        """
        Publish an event to the events channel.

        Args:
            event_type: Type of event (e.g., 'module_registered', 'error')
            data: Event data payload
            source: Optional source identifier

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            event = QyroEvent(
                event_type=event_type,
                data=data,
                timestamp=time.time(),
                source=source
            )

            self._redis.publish(RedisKeys.EVENTS, json.dumps(event.to_dict()))

            logger.debug("event_published", event_type=event_type)

        except RedisError as e:
            logger.error("event_publish_failed", error=str(e))
            raise RedisConnectionError(f"Failed to publish event: {e}")

    def subscribe_to_events(self, callback: Callable[[QyroEvent], None]) -> None:
        """
        Subscribe to general events.

        Args:
            callback: Function to call when events are received. Receives
                a QyroEvent object.
        """
        with self._lock:
            if callback in self._event_callbacks:
                logger.warning("event_callback_already_registered")
                return

            self._event_callbacks.append(callback)

            # Start pub/sub thread if not already running
            if not self._pubsub_running.is_set():
                self._start_pubsub_thread()

            logger.info("events_subscribed", callbacks=len(self._event_callbacks))

    def broadcast(self, message: Dict[str, Any]) -> None:
        """
        Broadcast a message to all modules.

        Args:
            message: Message to broadcast

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            self._redis.publish(RedisKeys.BROADCAST, json.dumps(message))
            logger.debug("message_broadcast")

        except RedisError as e:
            logger.error("broadcast_failed", error=str(e))
            raise RedisConnectionError(f"Failed to broadcast message: {e}")

    def subscribe_to_broadcasts(self, callback: Callable[[Dict[str, Any]], None]) -> None:
        """
        Subscribe to broadcast messages.

        Args:
            callback: Function to call when broadcasts are received
        """
        # Create separate pubsub connection for broadcasts using dedicated pool
        pubsub_redis = Redis(connection_pool=self._pubsub_pool)
        pubsub = pubsub_redis.pubsub()
        pubsub.subscribe(RedisKeys.BROADCAST)

        def broadcast_listener():
            try:
                for message in pubsub.listen():
                    if message['type'] == 'message':
                        try:
                            data = json.loads(message['data'])
                            callback(data)
                        except json.JSONDecodeError as e:
                            logger.error("broadcast_decode_error", error=str(e))
            except Exception as e:
                logger.error("broadcast_listener_error", error=str(e))
            finally:
                pubsub.close()

        thread = threading.Thread(target=broadcast_listener, daemon=True)
        thread.start()
        logger.info("broadcast_subscribed")

    def set_module_state(self, module_name: str, state: Dict[str, Any]) -> None:
        """
        Set state for a specific module.

        Args:
            module_name: Name of the module
            state: Module state dictionary

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}"
            self._redis.set(key, json.dumps(state))

            # Publish module state change event
            self.publish_event(
                EventType.MODULE_STATE_CHANGED.value,
                {'module': module_name, 'state': state},
                source=module_name
            )

            logger.debug("module_state_set", module=module_name)

        except RedisError as e:
            logger.error("module_state_set_failed", error=str(e))
            raise RedisConnectionError(f"Failed to set module state: {e}")

    def get_module_state(self, module_name: str) -> Dict[str, Any]:
        """
        Get state for a specific module.

        Args:
            module_name: Name of the module

        Returns:
            Module state dictionary. Returns empty dict if module has no state.

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}"
            data = self._redis.get(key)

            if data is None:
                return {}

            return json.loads(data)

        except RedisError as e:
            logger.error("module_state_get_failed", error=str(e))
            raise RedisConnectionError(f"Failed to get module state: {e}")
        except json.JSONDecodeError as e:
            logger.error("module_state_decode_error", error=str(e))
            return {}

    def register_module(self, module_name: str, metadata: Dict[str, Any]) -> None:
        """
        Register a module with the system.

        Args:
            module_name: Name of the module
            metadata: Module metadata (version, language, pid, etc.)

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            # Create module metadata
            module_metadata = ModuleMetadata(
                name=module_name,
                version=metadata.get('version', '1.0.0'),
                language=metadata.get('language', 'unknown'),
                pid=metadata.get('pid'),
                registered_at=time.time(),
                metadata=metadata.get('metadata', {})
            )

            # Add to registry
            self._redis.sadd(RedisKeys.MODULE_REGISTRY, module_name)

            # Store metadata
            metadata_key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}:metadata"
            self._redis.set(metadata_key, json.dumps(module_metadata.to_dict()))

            # Publish registration event
            self.publish_event(
                EventType.MODULE_REGISTERED.value,
                module_metadata.to_dict(),
                source=module_name
            )

            logger.info("module_registered", module=module_name, metadata=metadata)

        except RedisError as e:
            logger.error("module_register_failed", error=str(e))
            raise RedisConnectionError(f"Failed to register module: {e}")

    def unregister_module(self, module_name: str) -> None:
        """
        Unregister a module from the system.

        Args:
            module_name: Name of the module

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            # Remove from registry
            self._redis.srem(RedisKeys.MODULE_REGISTRY, module_name)

            # Remove metadata
            metadata_key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}:metadata"
            self._redis.delete(metadata_key)

            # Remove state
            state_key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}"
            self._redis.delete(state_key)

            # Publish unregistration event
            self.publish_event(
                EventType.MODULE_UNREGISTERED.value,
                {'module': module_name},
                source=module_name
            )

            logger.info("module_unregistered", module=module_name)

        except RedisError as e:
            logger.error("module_unregister_failed", error=str(e))
            raise RedisConnectionError(f"Failed to unregister module: {e}")

    def get_registered_modules(self) -> List[str]:
        """
        Get list of registered modules.

        Returns:
            List of module names

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            modules = self._redis.smembers(RedisKeys.MODULE_REGISTRY)
            return sorted(list(modules)) if modules else []

        except RedisError as e:
            logger.error("get_modules_failed", error=str(e))
            raise RedisConnectionError(f"Failed to get registered modules: {e}")

    def get_module_metadata(self, module_name: str) -> Optional[Dict[str, Any]]:
        """
        Get metadata for a specific module.

        Args:
            module_name: Name of the module

        Returns:
            Module metadata dictionary or None if not found
        """
        self._ensure_connection()

        try:
            metadata_key = f"{RedisKeys.MODULE_STATE_PREFIX}{module_name}:metadata"
            data = self._redis.get(metadata_key)

            if data is None:
                return None

            return json.loads(data)

        except RedisError as e:
            logger.error("get_module_metadata_failed", error=str(e))
            return None
        except json.JSONDecodeError:
            return None

    def get_stats(self) -> Dict[str, Any]:
        """
        Get memory and Redis statistics.

        Returns:
            Dictionary containing statistics about the Redis memory system
        """
        self._ensure_connection()

        try:
            # Get Redis info
            info = self._redis.info()

            # Get state size
            state_size = self._redis.hlen(RedisKeys.STATE)

            # Get module count
            module_count = self._redis.scard(RedisKeys.MODULE_REGISTRY)

            # Get connection pool stats (read from the command pool created in
            # __init__; the per-connection counters are internal redis-py
            # attributes, so fall back to defaults when they are absent)
            pool_stats = {
                'max_connections': self._command_pool.max_connections,
                'created_connections': getattr(self._command_pool, '_created_connections', 0),
                'available_connections': len(getattr(self._command_pool, '_available_connections', []))
            }

            return {
                'redis': {
                    'host': self.host,
                    'port': self.port,
                    'db': self.db,
                    'connected': self._connected,
                    'memory_used': info.get('used_memory_human', 'N/A'),
                    'total_keys': info.get('db0', {}).get('keys', 0) if 'db0' in info else 0
                },
                'state': {
                    'keys': state_size,
                    'modules': module_count
                },
                'connection_pool': pool_stats,
                'subscriptions': {
                    'state_callbacks': len(self._state_callbacks),
                    'event_callbacks': len(self._event_callbacks),
                    'pubsub_running': self._pubsub_running.is_set()
                }
            }

        except RedisError as e:
            logger.error("get_stats_failed", error=str(e))
            return {
                'error': str(e),
                'connected': False
            }

    def clear_state(self) -> None:
        """
        Clear all state from Redis.

        This removes the main state but preserves module registry and
        module states.

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            self._redis.delete(RedisKeys.STATE)
            logger.info("state_cleared")

        except RedisError as e:
            logger.error("clear_state_failed", error=str(e))
            raise RedisConnectionError(f"Failed to clear state: {e}")

    def clear_all(self) -> None:
        """
        Clear all Qyro data from Redis.

        This removes all state, module registry, and module states.
        Use with caution!

        Raises:
            RedisConnectionError: If Redis connection fails
        """
        self._ensure_connection()

        try:
            # Get all Qyro keys
            keys = self._redis.keys('qyro:*')

            if keys:
                self._redis.delete(*keys)

            logger.info("all_data_cleared", keys_deleted=len(keys) if keys else 0)

        except RedisError as e:
            logger.error("clear_all_failed", error=str(e))
            raise RedisConnectionError(f"Failed to clear all data: {e}")

    def close(self) -> None:
        """
        Close Redis connections and cleanup resources.

        This method stops the pub/sub listener thread and closes
        all Redis connections.
        """
        with self._lock:
            # Stop pub/sub thread
            if self._pubsub_running.is_set():
                self._pubsub_running.clear()

                # Wait for thread to finish (with timeout)
                if self._pubsub_thread and self._pubsub_thread.is_alive():
                    self._pubsub_thread.join(timeout=2.0)

            # Clear callbacks
            self._state_callbacks.clear()
            self._event_callbacks.clear()

            # Close connection pools
            try:
                if hasattr(self._command_pool, 'disconnect'):
                    self._command_pool.disconnect()
                if hasattr(self._pubsub_pool, 'disconnect'):
                    self._pubsub_pool.disconnect()
                logger.info("redis_connections_closed")
            except Exception as e:
                logger.error("close_connections_failed", error=str(e))

            # Explicitly clean up Redis client references
            self._redis = None
            self._pubsub_thread = None

            self._connected = False
            logger.info("redis_memory_closed")

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit."""
        self.close()
        return False

    def __del__(self):
        """Destructor to ensure connections are closed."""
        try:
            self.close()
        except Exception:
            pass
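
# Illustrative sketch (not part of the packaged source): write() and
# update_fields() above merge into the qyro:state hash (HSET without a
# preceding delete), so successive writes accumulate fields instead of
# replacing the whole state. Assumes a Redis server reachable on
# localhost:6379; the field names are made up.
def _example_partial_update() -> None:
    memory = RedisQyroMemory(host="localhost", port=6379)
    try:
        memory.clear_state()
        memory.write({"counter": 0, "status": "active"})
        memory.write({"counter": 1})         # only "counter" is touched
        state = memory.read()                # {"counter": 1, "status": "active"}
        assert state["status"] == "active"   # untouched fields survive the second write
    finally:
        memory.close()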


# Convenience function for creating a memory instance
def create_redis_memory(
    host: str = 'localhost',
    port: int = 6379,
    db: int = 0,
    password: Optional[str] = None
) -> RedisQyroMemory:
    """
    Create a RedisQyroMemory instance with default settings.

    Args:
        host: Redis server hostname
        port: Redis server port
        db: Redis database number
        password: Redis password (optional)

    Returns:
        Configured RedisQyroMemory instance
    """
    return RedisQyroMemory(host=host, port=port, db=db, password=password)