osism 0.20250804.0__py3-none-any.whl → 0.20250824.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,304 @@
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Event bridge for sharing events between RabbitMQ listener and WebSocket manager.
+This module provides a Redis-based way to forward events from the listener service
+to the WebSocket manager across different containers.
+"""
+
+import threading
+import queue
+import logging
+import json
+import os
+from typing import Dict, Any
+
+try:
+    import redis
+
+    REDIS_AVAILABLE = True
+except ImportError:
+    REDIS_AVAILABLE = False
+
+logger = logging.getLogger("osism.event_bridge")
+
+
+class EventBridge:
+    """Redis-based bridge for forwarding events between RabbitMQ listener and WebSocket manager across containers."""
+
+    def __init__(self):
+        self._event_queue = queue.Queue()
+        self._websocket_manager = None
+        self._processor_thread = None
+        self._subscriber_thread = None
+        self._shutdown_event = threading.Event()
+        self._redis_client = None
+        self._redis_subscriber = None
+
+        # Initialize Redis connection
+        self._init_redis()
+
+    def _init_redis(self):
+        """Initialize Redis connection."""
+        if not REDIS_AVAILABLE:
+            logger.warning(
+                "Redis not available - event bridge will use local queue only"
+            )
+            return
+
+        try:
+            redis_host = os.getenv("REDIS_HOST", "redis")
+            redis_port = int(os.getenv("REDIS_PORT", "6379"))
+            redis_db = int(os.getenv("REDIS_DB", "0"))
+
+            self._redis_client = redis.Redis(
+                host=redis_host,
+                port=redis_port,
+                db=redis_db,
+                decode_responses=True,
+                socket_connect_timeout=10,
+                socket_timeout=None,  # No timeout for blocking operations
+                health_check_interval=30,
+            )
+
+            # Test connection
+            self._redis_client.ping()
+            logger.info(f"Connected to Redis at {redis_host}:{redis_port}")
+
+            # Create subscriber for WebSocket manager (API container)
+            self._redis_subscriber = self._redis_client.pubsub()
+
+        except Exception as e:
+            logger.error(f"Failed to connect to Redis: {e}")
+            self._redis_client = None
+            self._redis_subscriber = None
+
+    def set_websocket_manager(self, websocket_manager):
+        """Set the WebSocket manager instance and start Redis subscriber."""
+        self._websocket_manager = websocket_manager
+        logger.info("WebSocket manager connected to event bridge")
+
+        # Start Redis subscriber thread if Redis is available
+        if self._redis_client and not self._subscriber_thread:
+            self._start_redis_subscriber()
+
+        # Start local processor thread if not already running
+        if not self._processor_thread or not self._processor_thread.is_alive():
+            self._start_processor_thread()
+
+    def add_event(self, event_type: str, payload: Dict[str, Any]) -> None:
+        """Add an event to be forwarded to WebSocket clients via Redis."""
+        try:
+            event_data = {"event_type": event_type, "payload": payload}
+
+            if self._redis_client:
+                # Publish to Redis for cross-container communication
+                try:
+                    message = json.dumps(event_data)
+                    subscribers = self._redis_client.publish("osism:events", message)
+                    logger.info(
+                        f"Published event to Redis: {event_type} (subscribers: {subscribers})"
+                    )
+
+                    if subscribers == 0:
+                        logger.warning(f"No Redis subscribers for event: {event_type}")
+
+                except Exception as redis_error:
+                    logger.error(f"Failed to publish event to Redis: {redis_error}")
+                    # Try to reconnect Redis
+                    try:
+                        self._init_redis()
+                        if self._redis_client:
+                            message = json.dumps(event_data)
+                            subscribers = self._redis_client.publish(
+                                "osism:events", message
+                            )
+                            logger.info(
+                                f"Published event to Redis after reconnect: {event_type} (subscribers: {subscribers})"
+                            )
+                        else:
+                            raise Exception("Redis reconnection failed")
+                    except Exception as reconnect_error:
+                        logger.error(f"Redis reconnection failed: {reconnect_error}")
+                        # Fallback to local queue
+                        self._event_queue.put_nowait(event_data)
+                        logger.debug(
+                            f"Added event to local fallback queue: {event_type}"
+                        )
+            else:
+                # Local queue fallback
+                self._event_queue.put_nowait(event_data)
+                logger.debug(f"Added event to local queue: {event_type}")
+
+        except queue.Full:
+            logger.warning("Event bridge queue is full, dropping event")
+        except Exception as e:
+            logger.error(f"Error adding event to bridge: {e}")
+
+    def _start_redis_subscriber(self):
+        """Start Redis subscriber thread for receiving events from other containers."""
+        self._subscriber_thread = threading.Thread(
+            target=self._redis_subscriber_loop, name="RedisEventSubscriber", daemon=True
+        )
+        self._subscriber_thread.start()
+        logger.info("Started Redis event subscriber thread")
+
+    def _start_processor_thread(self):
+        """Start the background thread that processes local events."""
+        self._processor_thread = threading.Thread(
+            target=self._process_events, name="EventBridgeProcessor", daemon=True
+        )
+        self._processor_thread.start()
+        logger.info("Started event bridge processor thread")
+
+    def _redis_subscriber_loop(self):
+        """Redis subscriber loop for receiving events from other containers with auto-reconnect."""
+        retry_count = 0
+        max_retries = 5
+        retry_delay = 5  # seconds
+
+        while not self._shutdown_event.is_set() and retry_count < max_retries:
+            try:
+                if not self._redis_subscriber:
+                    logger.error("Redis subscriber not available")
+                    return
+
+                logger.info(
+                    f"Starting Redis subscriber (attempt {retry_count + 1}/{max_retries})"
+                )
+                self._redis_subscriber.subscribe("osism:events")
+                logger.info("Subscribed to Redis events channel")
+                retry_count = 0  # Reset retry count on successful connection
+
+                # Use get_message with timeout instead of listen() to avoid hanging
+                while not self._shutdown_event.is_set():
+                    try:
+                        # Check for messages with timeout
+                        message = self._redis_subscriber.get_message(timeout=10.0)
+
+                        if message is None:
+                            continue  # Timeout, check shutdown and continue
+
+                        if message["type"] == "message":
+                            try:
+                                event_data = json.loads(message["data"])
+                                logger.info(
+                                    f"Received event from Redis: {event_data.get('event_type')}"
+                                )
+
+                                if self._websocket_manager:
+                                    # Process event directly
+                                    self._process_single_event(event_data)
+                                else:
+                                    # Add to local queue for later processing
+                                    self._event_queue.put_nowait(event_data)
+
+                            except json.JSONDecodeError as e:
+                                logger.error(
+                                    f"Failed to decode Redis event message: {e}"
+                                )
+                            except Exception as e:
+                                logger.error(f"Error processing Redis event: {e}")
+
+                    except Exception as get_msg_error:
+                        logger.error(f"Error getting Redis message: {get_msg_error}")
+                        break  # Break inner loop to trigger reconnect
+
+            except Exception as e:
+                retry_count += 1
+                logger.error(
+                    f"Redis subscriber error (attempt {retry_count}/{max_retries}): {e}"
+                )
+
+                if retry_count < max_retries:
+                    logger.info(
+                        f"Retrying Redis subscription in {retry_delay} seconds..."
+                    )
+                    self._shutdown_event.wait(retry_delay)
+
+                    # Recreate Redis connection
+                    try:
+                        self._init_redis()
+                    except Exception as init_error:
+                        logger.error(f"Failed to reinitialize Redis: {init_error}")
+
+            finally:
+                if self._redis_subscriber:
+                    try:
+                        self._redis_subscriber.close()
+                    except Exception:
+                        pass  # Ignore errors during cleanup
+
+        if retry_count >= max_retries:
+            logger.error("Max Redis reconnection attempts reached, giving up")
+        else:
+            logger.info("Redis subscriber stopped")
+
+    def _process_single_event(self, event_data: Dict[str, Any]):
+        """Process a single event with WebSocket manager."""
+        if not self._websocket_manager:
+            logger.warning("No WebSocket manager available, dropping event")
+            return
+
+        try:
+            import asyncio
+
+            # Create new event loop for this thread
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
+            # Process the event
+            loop.run_until_complete(
+                self._websocket_manager.broadcast_event_from_notification(
+                    event_data["event_type"], event_data["payload"]
+                )
+            )
+
+            loop.close()
+            logger.debug(f"Processed event via bridge: {event_data['event_type']}")
+
+        except Exception as e:
+            logger.error(f"Error processing event via bridge: {e}")
+
+    def _process_events(self):
+        """Background thread that processes events from the local queue."""
+        logger.info("Event bridge processor started")
+
+        while not self._shutdown_event.is_set():
+            try:
+                # Get event with timeout to check shutdown periodically
+                try:
+                    event_data = self._event_queue.get(timeout=1.0)
+                except queue.Empty:
+                    continue
+
+                self._process_single_event(event_data)
+                self._event_queue.task_done()
+
+            except Exception as e:
+                logger.error(f"Unexpected error in event bridge processor: {e}")
+
+        logger.info("Event bridge processor stopped")
+
+    def shutdown(self):
+        """Shutdown the event bridge."""
+        logger.info("Shutting down event bridge")
+        self._shutdown_event.set()
+
+        # Close Redis subscriber
+        if self._redis_subscriber:
+            try:
+                self._redis_subscriber.close()
+            except Exception as e:
+                logger.error(f"Error closing Redis subscriber: {e}")
+
+        # Wait for threads to finish
+        if self._processor_thread and self._processor_thread.is_alive():
+            self._processor_thread.join(timeout=5.0)
+
+        if self._subscriber_thread and self._subscriber_thread.is_alive():
+            self._subscriber_thread.join(timeout=5.0)
+
+
+# Global event bridge instance
+event_bridge = EventBridge()
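The new module is intended to be used as a process-wide singleton: the listener container publishes notifications with add_event(), while the API container registers its WebSocket manager via set_websocket_manager() and receives the events through the Redis subscriber thread (or the local fallback queue when Redis is unavailable). A minimal usage sketch, assuming the package is installed and Redis is reachable; the EchoManager class and all event values below are illustrative placeholders, not part of the package:

# Illustrative use of the event_bridge singleton; only event_bridge and its
# methods come from the package, everything else here is an assumption.
from osism.services.event_bridge import event_bridge


class EchoManager:
    """Hypothetical stand-in for the real WebSocket manager."""

    async def broadcast_event_from_notification(self, event_type, payload):
        # The bridge awaits this coroutine for every event it processes.
        print(f"broadcast {event_type}: {payload}")


# API container side: attach a manager so received events are broadcast.
event_bridge.set_websocket_manager(EchoManager())

# Listener container side: publish an event; it goes to the Redis channel
# "osism:events", or to the local fallback queue if Redis is unavailable.
event_bridge.add_event(
    "baremetal.node.power_set.end",  # example event type
    {"ironic_object.data": {"name": "node-1", "power_state": "power on"}},
)

# Stop the subscriber and processor threads.
event_bridge.shutdown()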
@@ -14,6 +14,41 @@ import requests
 from osism.tasks import netbox
 from osism import settings
 
+# Multiple exchanges for different OpenStack services
+EXCHANGES_CONFIG = {
+    "ironic": {
+        "exchange": "ironic",
+        "routing_key": "ironic_versioned_notifications.info",
+        "queue": "osism-listener-ironic",
+    },
+    "nova": {
+        "exchange": "nova",
+        "routing_key": "nova_versioned_notifications.info",
+        "queue": "osism-listener-nova",
+    },
+    "neutron": {
+        "exchange": "neutron",
+        "routing_key": "neutron_versioned_notifications.info",
+        "queue": "osism-listener-neutron",
+    },
+    "cinder": {
+        "exchange": "cinder",
+        "routing_key": "cinder_versioned_notifications.info",
+        "queue": "osism-listener-cinder",
+    },
+    "keystone": {
+        "exchange": "keystone",
+        "routing_key": "keystone_versioned_notifications.info",
+        "queue": "osism-listener-keystone",
+    },
+    "glance": {
+        "exchange": "glance",
+        "routing_key": "glance_versioned_notifications.info",
+        "queue": "osism-listener-glance",
+    },
+}
+
+# Legacy constants for backward compatibility
 EXCHANGE_NAME = "ironic"
 ROUTING_KEY = "ironic_versioned_notifications.info"
 QUEUE_NAME = "osism-listener-ironic"
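Each entry above selects one service's versioned-notification topic and binds it to a dedicated queue. What arrives on those queues is an oslo.messaging envelope: the notification itself is carried as a JSON string under the "oslo.message" key, which on_message() in the next hunk decodes before reading event_type and payload. A hedged sketch of that shape; all concrete field values are made up for illustration:

import json

# Assumed shape of a delivered message body (illustrative values only).
body = {
    "oslo.version": "2.0",
    "oslo.message": json.dumps(
        {
            "event_type": "baremetal.node.power_set.end",
            "payload": {
                "ironic_object.data": {
                    "name": "node-1",
                    "provision_state": "active",
                    "power_state": "power on",
                }
            },
        }
    ),
}

# Same decoding step the listener performs.
data = json.loads(body["oslo.message"])
print(data["event_type"].split(".")[0])  # -> "baremetal"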
@@ -138,39 +173,115 @@ class NotificationsDump(ConsumerMixin):
         self.baremetal_events = BaremetalEvents()
         self.osism_api_session: None | requests.Session = None
         self.osism_baremetal_api_url: None | str = None
+        self.websocket_manager = None
+
         if settings.OSISM_API_URL:
             logger.info("Setting up OSISM API")
             self.osism_api_session = requests.Session()
             self.osism_baremetal_api_url = (
                 settings.OSISM_API_URL.rstrip("/") + "/notifications/baremetal"
             )
+
+        # Import event_bridge for WebSocket forwarding
+        try:
+            from osism.services.event_bridge import event_bridge
+
+            self.event_bridge = event_bridge
+            logger.info("Event bridge connected to RabbitMQ listener")
+        except ImportError:
+            logger.warning("Event bridge not available")
+            self.event_bridge = None
+
         return
 
     def get_consumers(self, consumer, channel):
-        exchange = Exchange(EXCHANGE_NAME, type="topic", durable=False)
-        queue = Queue(
-            QUEUE_NAME,
-            exchange,
-            routing_key=ROUTING_KEY,
-            durable=False,
-            auto_delete=True,
-            no_ack=True,
-        )
-        return [consumer(queue, callbacks=[self.on_message])]
+        consumers = []
+
+        # Create consumers for all configured exchanges
+        for service_name, config in EXCHANGES_CONFIG.items():
+            try:
+                exchange = Exchange(config["exchange"], type="topic", durable=False)
+                queue = Queue(
+                    config["queue"],
+                    exchange,
+                    routing_key=config["routing_key"],
+                    durable=False,
+                    auto_delete=True,
+                    no_ack=True,
+                )
+                consumers.append(consumer(queue, callbacks=[self.on_message]))
+                logger.info(
+                    f"Configured consumer for {service_name} exchange: {config['exchange']}"
+                )
+            except Exception as e:
+                logger.error(f"Failed to configure consumer for {service_name}: {e}")
+
+        if not consumers:
+            logger.warning(
+                "No consumers configured, falling back to legacy ironic consumer"
+            )
+            # Fallback to legacy configuration
+            exchange = Exchange(EXCHANGE_NAME, type="topic", durable=False)
+            queue = Queue(
+                QUEUE_NAME,
+                exchange,
+                routing_key=ROUTING_KEY,
+                durable=False,
+                auto_delete=True,
+                no_ack=True,
+            )
+            consumers.append(consumer(queue, callbacks=[self.on_message]))
+
+        return consumers
 
     def on_message(self, body, message):
         data = json.loads(body["oslo.message"])
-        logger.debug(
-            data["event_type"]
-            + ": "
-            + str(
-                {
+
+        # Log event with service type detection
+        event_type = data.get("event_type", "")
+        service_type = event_type.split(".")[0] if event_type else "unknown"
+
+        # Enhanced logging for different event types
+        payload_info = {}
+        if "payload" in data:
+            payload = data["payload"]
+
+            # Extract relevant info based on service type
+            if service_type == "baremetal" and "ironic_object.data" in payload:
+                ironic_data = payload["ironic_object.data"]
+                payload_info = {
                     k: v
-                    for k, v in data["payload"]["ironic_object.data"].items()
-                    if k in ["provision_state", "power_state"]
+                    for k, v in ironic_data.items()
+                    if k in ["name", "provision_state", "power_state"]
                 }
-            )
-        )
+            elif service_type in ["compute", "nova"] and "nova_object.data" in payload:
+                nova_data = payload["nova_object.data"]
+                payload_info = {
+                    k: v
+                    for k, v in nova_data.items()
+                    if k in ["uuid", "host", "state", "task_state"]
+                }
+            elif service_type in ["network", "neutron"]:
+                # Neutron events might have different structures
+                payload_info = {"service": "neutron"}
+            else:
+                # Generic payload info
+                payload_info = {"service": service_type}
+
+        logger.debug(f"{event_type}: {payload_info}")
+        logger.info(f"Received {service_type} event: {event_type}")
+
+        # Send event to WebSocket clients via event bridge
+        if self.event_bridge:
+            try:
+                logger.debug(f"Forwarding event to WebSocket via bridge: {event_type}")
+                self.event_bridge.add_event(data["event_type"], data["payload"])
+                logger.debug(f"Successfully forwarded event to bridge: {event_type}")
+            except Exception as e:
+                logger.error(f"Error forwarding event to bridge: {e}")
+                logger.error(
+                    f"Event data was: {data['event_type']} - {data.get('payload', {}).get('ironic_object.data', {}).get('name', 'unknown')}"
+                )
 
         if self.osism_api_session:
             tries = 1