osism 0.20250823.0__py3-none-any.whl → 0.20250824.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- osism/api.py +101 -3
- osism/services/event_bridge.py +304 -0
- osism/services/listener.py +130 -19
- osism/services/websocket_manager.py +271 -0
- osism/tasks/openstack.py +14 -13
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/METADATA +2 -1
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/RECORD +13 -11
- osism-0.20250824.1.dist-info/licenses/AUTHORS +1 -0
- osism-0.20250824.1.dist-info/pbr.json +1 -0
- osism-0.20250823.0.dist-info/licenses/AUTHORS +0 -1
- osism-0.20250823.0.dist-info/pbr.json +0 -1
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/WHEEL +0 -0
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/entry_points.txt +0 -0
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/licenses/LICENSE +0 -0
- {osism-0.20250823.0.dist-info → osism-0.20250824.1.dist-info}/top_level.txt +0 -0
osism/api.py
CHANGED
@@ -3,16 +3,28 @@
|
|
3
3
|
import datetime
|
4
4
|
from logging.config import dictConfig
|
5
5
|
import logging
|
6
|
+
import json
|
6
7
|
from typing import Optional, Dict, Any
|
7
8
|
from uuid import UUID
|
8
9
|
|
9
|
-
from fastapi import
|
10
|
+
from fastapi import (
|
11
|
+
FastAPI,
|
12
|
+
Header,
|
13
|
+
Request,
|
14
|
+
Response,
|
15
|
+
HTTPException,
|
16
|
+
status,
|
17
|
+
WebSocket,
|
18
|
+
WebSocketDisconnect,
|
19
|
+
)
|
10
20
|
from pydantic import BaseModel, Field
|
11
21
|
from starlette.middleware.cors import CORSMiddleware
|
12
22
|
|
13
23
|
from osism.tasks import reconciler, openstack
|
14
24
|
from osism import utils
|
15
25
|
from osism.services.listener import BaremetalEvents
|
26
|
+
from osism.services.websocket_manager import websocket_manager
|
27
|
+
from osism.services.event_bridge import event_bridge
|
16
28
|
|
17
29
|
|
18
30
|
class NotificationBaremetal(BaseModel):
|
@@ -95,6 +107,10 @@ app.add_middleware(
|
|
95
107
|
"Content-Type",
|
96
108
|
"Authorization",
|
97
109
|
"X-Hook-Signature",
|
110
|
+
"Sec-WebSocket-Protocol",
|
111
|
+
"Sec-WebSocket-Key",
|
112
|
+
"Sec-WebSocket-Version",
|
113
|
+
"Sec-WebSocket-Extensions",
|
98
114
|
],
|
99
115
|
)
|
100
116
|
|
@@ -103,6 +119,9 @@ logger = logging.getLogger("osism.api")
|
|
103
119
|
|
104
120
|
baremetal_events = BaremetalEvents()
|
105
121
|
|
122
|
+
# Connect event bridge to WebSocket manager
|
123
|
+
event_bridge.set_websocket_manager(websocket_manager)
|
124
|
+
|
106
125
|
|
107
126
|
class DeviceSearchResult(BaseModel):
|
108
127
|
result: str = Field(..., description="Operation result status")
|
@@ -110,11 +129,13 @@ class DeviceSearchResult(BaseModel):
|
|
110
129
|
|
111
130
|
|
112
131
|
class BaremetalNode(BaseModel):
|
113
|
-
uuid: str = Field(
|
132
|
+
uuid: Optional[str] = Field(None, description="Unique identifier of the node")
|
114
133
|
name: Optional[str] = Field(None, description="Name of the node")
|
115
134
|
power_state: Optional[str] = Field(None, description="Current power state")
|
116
135
|
provision_state: Optional[str] = Field(None, description="Current provision state")
|
117
|
-
maintenance: bool = Field(
|
136
|
+
maintenance: Optional[bool] = Field(
|
137
|
+
None, description="Whether node is in maintenance mode"
|
138
|
+
)
|
118
139
|
instance_uuid: Optional[str] = Field(
|
119
140
|
None, description="UUID of associated instance"
|
120
141
|
)
|
@@ -177,6 +198,16 @@ async def v1() -> Dict[str, str]:
|
|
177
198
|
return {"result": "ok"}
|
178
199
|
|
179
200
|
|
201
|
+
@app.get("/v1/events", tags=["events"])
|
202
|
+
async def events_info() -> Dict[str, str]:
|
203
|
+
"""Events endpoint info - WebSocket available at /v1/events/openstack."""
|
204
|
+
return {
|
205
|
+
"result": "ok",
|
206
|
+
"websocket_endpoint": "/v1/events/openstack",
|
207
|
+
"description": "Real-time OpenStack events via WebSocket",
|
208
|
+
}
|
209
|
+
|
210
|
+
|
180
211
|
class SinkResponse(BaseModel):
|
181
212
|
result: str = Field(..., description="Operation result status")
|
182
213
|
|
@@ -348,6 +379,73 @@ def process_netbox_webhook(webhook_input: WebhookNetboxData) -> None:
|
|
348
379
|
logger.info(f"Ignoring change for unmanaged device {name}")
|
349
380
|
|
350
381
|
|
382
|
+
@app.websocket("/v1/events/openstack")
|
383
|
+
async def websocket_openstack_events(websocket: WebSocket):
|
384
|
+
"""WebSocket endpoint for streaming all OpenStack events in real-time.
|
385
|
+
|
386
|
+
Supports events from all OpenStack services: Ironic, Nova, Neutron, Cinder, Glance, Keystone
|
387
|
+
|
388
|
+
Clients can send filter messages in JSON format:
|
389
|
+
{
|
390
|
+
"action": "set_filters",
|
391
|
+
"event_filters": ["baremetal.node.power_set.end", "compute.instance.create.end", "network.port.create.end"],
|
392
|
+
"node_filters": ["server-01", "server-02"],
|
393
|
+
"service_filters": ["baremetal", "compute", "network"]
|
394
|
+
}
|
395
|
+
"""
|
396
|
+
await websocket_manager.connect(websocket)
|
397
|
+
try:
|
398
|
+
# Keep the connection alive and listen for client messages
|
399
|
+
while True:
|
400
|
+
try:
|
401
|
+
# Receive messages from client for filtering configuration
|
402
|
+
data = await websocket.receive_text()
|
403
|
+
logger.debug(f"Received WebSocket message: {data}")
|
404
|
+
|
405
|
+
try:
|
406
|
+
message = json.loads(data)
|
407
|
+
if message.get("action") == "set_filters":
|
408
|
+
event_filters = message.get("event_filters")
|
409
|
+
node_filters = message.get("node_filters")
|
410
|
+
service_filters = message.get("service_filters")
|
411
|
+
|
412
|
+
await websocket_manager.update_filters(
|
413
|
+
websocket,
|
414
|
+
event_filters=event_filters,
|
415
|
+
node_filters=node_filters,
|
416
|
+
service_filters=service_filters,
|
417
|
+
)
|
418
|
+
|
419
|
+
# Send acknowledgment
|
420
|
+
response = {
|
421
|
+
"type": "filter_update",
|
422
|
+
"status": "success",
|
423
|
+
"event_filters": event_filters,
|
424
|
+
"node_filters": node_filters,
|
425
|
+
"service_filters": service_filters,
|
426
|
+
}
|
427
|
+
await websocket.send_text(json.dumps(response))
|
428
|
+
|
429
|
+
except json.JSONDecodeError:
|
430
|
+
logger.warning(
|
431
|
+
f"Invalid JSON received from WebSocket client: {data}"
|
432
|
+
)
|
433
|
+
except Exception as e:
|
434
|
+
logger.error(f"Error processing WebSocket filter message: {e}")
|
435
|
+
|
436
|
+
except WebSocketDisconnect:
|
437
|
+
logger.info("WebSocket client disconnected")
|
438
|
+
break
|
439
|
+
except Exception as e:
|
440
|
+
logger.error(f"Error handling WebSocket message: {e}")
|
441
|
+
break
|
442
|
+
|
443
|
+
except WebSocketDisconnect:
|
444
|
+
logger.info("WebSocket client disconnected")
|
445
|
+
finally:
|
446
|
+
await websocket_manager.disconnect(websocket)
|
447
|
+
|
448
|
+
|
351
449
|
@app.post(
|
352
450
|
"/v1/webhook/netbox",
|
353
451
|
response_model=WebhookNetboxResponse,
|
@@ -0,0 +1,304 @@
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
2
|
+
|
3
|
+
"""
|
4
|
+
Event bridge for sharing events between RabbitMQ listener and WebSocket manager.
|
5
|
+
This module provides a Redis-based way to forward events from the listener service
|
6
|
+
to the WebSocket manager across different containers.
|
7
|
+
"""
|
8
|
+
|
9
|
+
import threading
|
10
|
+
import queue
|
11
|
+
import logging
|
12
|
+
import json
|
13
|
+
import os
|
14
|
+
from typing import Dict, Any
|
15
|
+
|
16
|
+
try:
|
17
|
+
import redis
|
18
|
+
|
19
|
+
REDIS_AVAILABLE = True
|
20
|
+
except ImportError:
|
21
|
+
REDIS_AVAILABLE = False
|
22
|
+
|
23
|
+
logger = logging.getLogger("osism.event_bridge")
|
24
|
+
|
25
|
+
|
26
|
+
class EventBridge:
|
27
|
+
"""Redis-based bridge for forwarding events between RabbitMQ listener and WebSocket manager across containers."""
|
28
|
+
|
29
|
+
def __init__(self):
|
30
|
+
self._event_queue = queue.Queue()
|
31
|
+
self._websocket_manager = None
|
32
|
+
self._processor_thread = None
|
33
|
+
self._subscriber_thread = None
|
34
|
+
self._shutdown_event = threading.Event()
|
35
|
+
self._redis_client = None
|
36
|
+
self._redis_subscriber = None
|
37
|
+
|
38
|
+
# Initialize Redis connection
|
39
|
+
self._init_redis()
|
40
|
+
|
41
|
+
def _init_redis(self):
|
42
|
+
"""Initialize Redis connection."""
|
43
|
+
if not REDIS_AVAILABLE:
|
44
|
+
logger.warning(
|
45
|
+
"Redis not available - event bridge will use local queue only"
|
46
|
+
)
|
47
|
+
return
|
48
|
+
|
49
|
+
try:
|
50
|
+
redis_host = os.getenv("REDIS_HOST", "redis")
|
51
|
+
redis_port = int(os.getenv("REDIS_PORT", "6379"))
|
52
|
+
redis_db = int(os.getenv("REDIS_DB", "0"))
|
53
|
+
|
54
|
+
self._redis_client = redis.Redis(
|
55
|
+
host=redis_host,
|
56
|
+
port=redis_port,
|
57
|
+
db=redis_db,
|
58
|
+
decode_responses=True,
|
59
|
+
socket_connect_timeout=10,
|
60
|
+
socket_timeout=None, # No timeout for blocking operations
|
61
|
+
health_check_interval=30,
|
62
|
+
)
|
63
|
+
|
64
|
+
# Test connection
|
65
|
+
self._redis_client.ping()
|
66
|
+
logger.info(f"Connected to Redis at {redis_host}:{redis_port}")
|
67
|
+
|
68
|
+
# Create subscriber for WebSocket manager (API container)
|
69
|
+
self._redis_subscriber = self._redis_client.pubsub()
|
70
|
+
|
71
|
+
except Exception as e:
|
72
|
+
logger.error(f"Failed to connect to Redis: {e}")
|
73
|
+
self._redis_client = None
|
74
|
+
self._redis_subscriber = None
|
75
|
+
|
76
|
+
def set_websocket_manager(self, websocket_manager):
|
77
|
+
"""Set the WebSocket manager instance and start Redis subscriber."""
|
78
|
+
self._websocket_manager = websocket_manager
|
79
|
+
logger.info("WebSocket manager connected to event bridge")
|
80
|
+
|
81
|
+
# Start Redis subscriber thread if Redis is available
|
82
|
+
if self._redis_client and not self._subscriber_thread:
|
83
|
+
self._start_redis_subscriber()
|
84
|
+
|
85
|
+
# Start local processor thread if not already running
|
86
|
+
if not self._processor_thread or not self._processor_thread.is_alive():
|
87
|
+
self._start_processor_thread()
|
88
|
+
|
89
|
+
def add_event(self, event_type: str, payload: Dict[str, Any]) -> None:
|
90
|
+
"""Add an event to be forwarded to WebSocket clients via Redis."""
|
91
|
+
try:
|
92
|
+
event_data = {"event_type": event_type, "payload": payload}
|
93
|
+
|
94
|
+
if self._redis_client:
|
95
|
+
# Publish to Redis for cross-container communication
|
96
|
+
try:
|
97
|
+
message = json.dumps(event_data)
|
98
|
+
subscribers = self._redis_client.publish("osism:events", message)
|
99
|
+
logger.info(
|
100
|
+
f"Published event to Redis: {event_type} (subscribers: {subscribers})"
|
101
|
+
)
|
102
|
+
|
103
|
+
if subscribers == 0:
|
104
|
+
logger.warning(f"No Redis subscribers for event: {event_type}")
|
105
|
+
|
106
|
+
except Exception as redis_error:
|
107
|
+
logger.error(f"Failed to publish event to Redis: {redis_error}")
|
108
|
+
# Try to reconnect Redis
|
109
|
+
try:
|
110
|
+
self._init_redis()
|
111
|
+
if self._redis_client:
|
112
|
+
message = json.dumps(event_data)
|
113
|
+
subscribers = self._redis_client.publish(
|
114
|
+
"osism:events", message
|
115
|
+
)
|
116
|
+
logger.info(
|
117
|
+
f"Published event to Redis after reconnect: {event_type} (subscribers: {subscribers})"
|
118
|
+
)
|
119
|
+
else:
|
120
|
+
raise Exception("Redis reconnection failed")
|
121
|
+
except Exception as reconnect_error:
|
122
|
+
logger.error(f"Redis reconnection failed: {reconnect_error}")
|
123
|
+
# Fallback to local queue
|
124
|
+
self._event_queue.put_nowait(event_data)
|
125
|
+
logger.debug(
|
126
|
+
f"Added event to local fallback queue: {event_type}"
|
127
|
+
)
|
128
|
+
else:
|
129
|
+
# Local queue fallback
|
130
|
+
self._event_queue.put_nowait(event_data)
|
131
|
+
logger.debug(f"Added event to local queue: {event_type}")
|
132
|
+
|
133
|
+
except queue.Full:
|
134
|
+
logger.warning("Event bridge queue is full, dropping event")
|
135
|
+
except Exception as e:
|
136
|
+
logger.error(f"Error adding event to bridge: {e}")
|
137
|
+
|
138
|
+
def _start_redis_subscriber(self):
|
139
|
+
"""Start Redis subscriber thread for receiving events from other containers."""
|
140
|
+
self._subscriber_thread = threading.Thread(
|
141
|
+
target=self._redis_subscriber_loop, name="RedisEventSubscriber", daemon=True
|
142
|
+
)
|
143
|
+
self._subscriber_thread.start()
|
144
|
+
logger.info("Started Redis event subscriber thread")
|
145
|
+
|
146
|
+
def _start_processor_thread(self):
|
147
|
+
"""Start the background thread that processes local events."""
|
148
|
+
self._processor_thread = threading.Thread(
|
149
|
+
target=self._process_events, name="EventBridgeProcessor", daemon=True
|
150
|
+
)
|
151
|
+
self._processor_thread.start()
|
152
|
+
logger.info("Started event bridge processor thread")
|
153
|
+
|
154
|
+
def _redis_subscriber_loop(self):
|
155
|
+
"""Redis subscriber loop for receiving events from other containers with auto-reconnect."""
|
156
|
+
retry_count = 0
|
157
|
+
max_retries = 5
|
158
|
+
retry_delay = 5 # seconds
|
159
|
+
|
160
|
+
while not self._shutdown_event.is_set() and retry_count < max_retries:
|
161
|
+
try:
|
162
|
+
if not self._redis_subscriber:
|
163
|
+
logger.error("Redis subscriber not available")
|
164
|
+
return
|
165
|
+
|
166
|
+
logger.info(
|
167
|
+
f"Starting Redis subscriber (attempt {retry_count + 1}/{max_retries})"
|
168
|
+
)
|
169
|
+
self._redis_subscriber.subscribe("osism:events")
|
170
|
+
logger.info("Subscribed to Redis events channel")
|
171
|
+
retry_count = 0 # Reset retry count on successful connection
|
172
|
+
|
173
|
+
# Use get_message with timeout instead of listen() to avoid hanging
|
174
|
+
while not self._shutdown_event.is_set():
|
175
|
+
try:
|
176
|
+
# Check for messages with timeout
|
177
|
+
message = self._redis_subscriber.get_message(timeout=10.0)
|
178
|
+
|
179
|
+
if message is None:
|
180
|
+
continue # Timeout, check shutdown and continue
|
181
|
+
|
182
|
+
if message["type"] == "message":
|
183
|
+
try:
|
184
|
+
event_data = json.loads(message["data"])
|
185
|
+
logger.info(
|
186
|
+
f"Received event from Redis: {event_data.get('event_type')}"
|
187
|
+
)
|
188
|
+
|
189
|
+
if self._websocket_manager:
|
190
|
+
# Process event directly
|
191
|
+
self._process_single_event(event_data)
|
192
|
+
else:
|
193
|
+
# Add to local queue for later processing
|
194
|
+
self._event_queue.put_nowait(event_data)
|
195
|
+
|
196
|
+
except json.JSONDecodeError as e:
|
197
|
+
logger.error(
|
198
|
+
f"Failed to decode Redis event message: {e}"
|
199
|
+
)
|
200
|
+
except Exception as e:
|
201
|
+
logger.error(f"Error processing Redis event: {e}")
|
202
|
+
|
203
|
+
except Exception as get_msg_error:
|
204
|
+
logger.error(f"Error getting Redis message: {get_msg_error}")
|
205
|
+
break # Break inner loop to trigger reconnect
|
206
|
+
|
207
|
+
except Exception as e:
|
208
|
+
retry_count += 1
|
209
|
+
logger.error(
|
210
|
+
f"Redis subscriber error (attempt {retry_count}/{max_retries}): {e}"
|
211
|
+
)
|
212
|
+
|
213
|
+
if retry_count < max_retries:
|
214
|
+
logger.info(
|
215
|
+
f"Retrying Redis subscription in {retry_delay} seconds..."
|
216
|
+
)
|
217
|
+
self._shutdown_event.wait(retry_delay)
|
218
|
+
|
219
|
+
# Recreate Redis connection
|
220
|
+
try:
|
221
|
+
self._init_redis()
|
222
|
+
except Exception as init_error:
|
223
|
+
logger.error(f"Failed to reinitialize Redis: {init_error}")
|
224
|
+
|
225
|
+
finally:
|
226
|
+
if self._redis_subscriber:
|
227
|
+
try:
|
228
|
+
self._redis_subscriber.close()
|
229
|
+
except Exception:
|
230
|
+
pass # Ignore errors during cleanup
|
231
|
+
|
232
|
+
if retry_count >= max_retries:
|
233
|
+
logger.error("Max Redis reconnection attempts reached, giving up")
|
234
|
+
else:
|
235
|
+
logger.info("Redis subscriber stopped")
|
236
|
+
|
237
|
+
def _process_single_event(self, event_data: Dict[str, Any]):
|
238
|
+
"""Process a single event with WebSocket manager."""
|
239
|
+
if not self._websocket_manager:
|
240
|
+
logger.warning("No WebSocket manager available, dropping event")
|
241
|
+
return
|
242
|
+
|
243
|
+
try:
|
244
|
+
import asyncio
|
245
|
+
|
246
|
+
# Create new event loop for this thread
|
247
|
+
loop = asyncio.new_event_loop()
|
248
|
+
asyncio.set_event_loop(loop)
|
249
|
+
|
250
|
+
# Process the event
|
251
|
+
loop.run_until_complete(
|
252
|
+
self._websocket_manager.broadcast_event_from_notification(
|
253
|
+
event_data["event_type"], event_data["payload"]
|
254
|
+
)
|
255
|
+
)
|
256
|
+
|
257
|
+
loop.close()
|
258
|
+
logger.debug(f"Processed event via bridge: {event_data['event_type']}")
|
259
|
+
|
260
|
+
except Exception as e:
|
261
|
+
logger.error(f"Error processing event via bridge: {e}")
|
262
|
+
|
263
|
+
def _process_events(self):
|
264
|
+
"""Background thread that processes events from the local queue."""
|
265
|
+
logger.info("Event bridge processor started")
|
266
|
+
|
267
|
+
while not self._shutdown_event.is_set():
|
268
|
+
try:
|
269
|
+
# Get event with timeout to check shutdown periodically
|
270
|
+
try:
|
271
|
+
event_data = self._event_queue.get(timeout=1.0)
|
272
|
+
except queue.Empty:
|
273
|
+
continue
|
274
|
+
|
275
|
+
self._process_single_event(event_data)
|
276
|
+
self._event_queue.task_done()
|
277
|
+
|
278
|
+
except Exception as e:
|
279
|
+
logger.error(f"Unexpected error in event bridge processor: {e}")
|
280
|
+
|
281
|
+
logger.info("Event bridge processor stopped")
|
282
|
+
|
283
|
+
def shutdown(self):
|
284
|
+
"""Shutdown the event bridge."""
|
285
|
+
logger.info("Shutting down event bridge")
|
286
|
+
self._shutdown_event.set()
|
287
|
+
|
288
|
+
# Close Redis subscriber
|
289
|
+
if self._redis_subscriber:
|
290
|
+
try:
|
291
|
+
self._redis_subscriber.close()
|
292
|
+
except Exception as e:
|
293
|
+
logger.error(f"Error closing Redis subscriber: {e}")
|
294
|
+
|
295
|
+
# Wait for threads to finish
|
296
|
+
if self._processor_thread and self._processor_thread.is_alive():
|
297
|
+
self._processor_thread.join(timeout=5.0)
|
298
|
+
|
299
|
+
if self._subscriber_thread and self._subscriber_thread.is_alive():
|
300
|
+
self._subscriber_thread.join(timeout=5.0)
|
301
|
+
|
302
|
+
|
303
|
+
# Global event bridge instance
|
304
|
+
event_bridge = EventBridge()
|
osism/services/listener.py
CHANGED
@@ -14,6 +14,41 @@ import requests
|
|
14
14
|
from osism.tasks import netbox
|
15
15
|
from osism import settings
|
16
16
|
|
17
|
+
# Multiple exchanges for different OpenStack services
|
18
|
+
EXCHANGES_CONFIG = {
|
19
|
+
"ironic": {
|
20
|
+
"exchange": "ironic",
|
21
|
+
"routing_key": "ironic_versioned_notifications.info",
|
22
|
+
"queue": "osism-listener-ironic",
|
23
|
+
},
|
24
|
+
"nova": {
|
25
|
+
"exchange": "nova",
|
26
|
+
"routing_key": "nova_versioned_notifications.info",
|
27
|
+
"queue": "osism-listener-nova",
|
28
|
+
},
|
29
|
+
"neutron": {
|
30
|
+
"exchange": "neutron",
|
31
|
+
"routing_key": "neutron_versioned_notifications.info",
|
32
|
+
"queue": "osism-listener-neutron",
|
33
|
+
},
|
34
|
+
"cinder": {
|
35
|
+
"exchange": "cinder",
|
36
|
+
"routing_key": "cinder_versioned_notifications.info",
|
37
|
+
"queue": "osism-listener-cinder",
|
38
|
+
},
|
39
|
+
"keystone": {
|
40
|
+
"exchange": "keystone",
|
41
|
+
"routing_key": "keystone_versioned_notifications.info",
|
42
|
+
"queue": "osism-listener-keystone",
|
43
|
+
},
|
44
|
+
"glance": {
|
45
|
+
"exchange": "glance",
|
46
|
+
"routing_key": "glance_versioned_notifications.info",
|
47
|
+
"queue": "osism-listener-glance",
|
48
|
+
},
|
49
|
+
}
|
50
|
+
|
51
|
+
# Legacy constants for backward compatibility
|
17
52
|
EXCHANGE_NAME = "ironic"
|
18
53
|
ROUTING_KEY = "ironic_versioned_notifications.info"
|
19
54
|
QUEUE_NAME = "osism-listener-ironic"
|
@@ -138,39 +173,115 @@ class NotificationsDump(ConsumerMixin):
|
|
138
173
|
self.baremetal_events = BaremetalEvents()
|
139
174
|
self.osism_api_session: None | requests.Session = None
|
140
175
|
self.osism_baremetal_api_url: None | str = None
|
176
|
+
self.websocket_manager = None
|
177
|
+
|
141
178
|
if settings.OSISM_API_URL:
|
142
179
|
logger.info("Setting up OSISM API")
|
143
180
|
self.osism_api_session = requests.Session()
|
144
181
|
self.osism_baremetal_api_url = (
|
145
182
|
settings.OSISM_API_URL.rstrip("/") + "/notifications/baremetal"
|
146
183
|
)
|
184
|
+
|
185
|
+
# Import event_bridge for WebSocket forwarding
|
186
|
+
try:
|
187
|
+
from osism.services.event_bridge import event_bridge
|
188
|
+
|
189
|
+
self.event_bridge = event_bridge
|
190
|
+
logger.info("Event bridge connected to RabbitMQ listener")
|
191
|
+
except ImportError:
|
192
|
+
logger.warning("Event bridge not available")
|
193
|
+
self.event_bridge = None
|
194
|
+
|
147
195
|
return
|
148
196
|
|
149
197
|
def get_consumers(self, consumer, channel):
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
198
|
+
consumers = []
|
199
|
+
|
200
|
+
# Create consumers for all configured exchanges
|
201
|
+
for service_name, config in EXCHANGES_CONFIG.items():
|
202
|
+
try:
|
203
|
+
exchange = Exchange(config["exchange"], type="topic", durable=False)
|
204
|
+
queue = Queue(
|
205
|
+
config["queue"],
|
206
|
+
exchange,
|
207
|
+
routing_key=config["routing_key"],
|
208
|
+
durable=False,
|
209
|
+
auto_delete=True,
|
210
|
+
no_ack=True,
|
211
|
+
)
|
212
|
+
consumers.append(consumer(queue, callbacks=[self.on_message]))
|
213
|
+
logger.info(
|
214
|
+
f"Configured consumer for {service_name} exchange: {config['exchange']}"
|
215
|
+
)
|
216
|
+
except Exception as e:
|
217
|
+
logger.error(f"Failed to configure consumer for {service_name}: {e}")
|
218
|
+
|
219
|
+
if not consumers:
|
220
|
+
logger.warning(
|
221
|
+
"No consumers configured, falling back to legacy ironic consumer"
|
222
|
+
)
|
223
|
+
# Fallback to legacy configuration
|
224
|
+
exchange = Exchange(EXCHANGE_NAME, type="topic", durable=False)
|
225
|
+
queue = Queue(
|
226
|
+
QUEUE_NAME,
|
227
|
+
exchange,
|
228
|
+
routing_key=ROUTING_KEY,
|
229
|
+
durable=False,
|
230
|
+
auto_delete=True,
|
231
|
+
no_ack=True,
|
232
|
+
)
|
233
|
+
consumers.append(consumer(queue, callbacks=[self.on_message]))
|
234
|
+
|
235
|
+
return consumers
|
160
236
|
|
161
237
|
def on_message(self, body, message):
|
162
238
|
data = json.loads(body["oslo.message"])
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
239
|
+
|
240
|
+
# Log event with service type detection
|
241
|
+
event_type = data.get("event_type", "")
|
242
|
+
service_type = event_type.split(".")[0] if event_type else "unknown"
|
243
|
+
|
244
|
+
# Enhanced logging for different event types
|
245
|
+
payload_info = {}
|
246
|
+
if "payload" in data:
|
247
|
+
payload = data["payload"]
|
248
|
+
|
249
|
+
# Extract relevant info based on service type
|
250
|
+
if service_type == "baremetal" and "ironic_object.data" in payload:
|
251
|
+
ironic_data = payload["ironic_object.data"]
|
252
|
+
payload_info = {
|
168
253
|
k: v
|
169
|
-
for k, v in
|
170
|
-
if k in ["provision_state", "power_state"]
|
254
|
+
for k, v in ironic_data.items()
|
255
|
+
if k in ["name", "provision_state", "power_state"]
|
171
256
|
}
|
172
|
-
|
173
|
-
|
257
|
+
elif service_type in ["compute", "nova"] and "nova_object.data" in payload:
|
258
|
+
nova_data = payload["nova_object.data"]
|
259
|
+
payload_info = {
|
260
|
+
k: v
|
261
|
+
for k, v in nova_data.items()
|
262
|
+
if k in ["uuid", "host", "state", "task_state"]
|
263
|
+
}
|
264
|
+
elif service_type in ["network", "neutron"]:
|
265
|
+
# Neutron events might have different structures
|
266
|
+
payload_info = {"service": "neutron"}
|
267
|
+
else:
|
268
|
+
# Generic payload info
|
269
|
+
payload_info = {"service": service_type}
|
270
|
+
|
271
|
+
logger.debug(f"{event_type}: {payload_info}")
|
272
|
+
logger.info(f"Received {service_type} event: {event_type}")
|
273
|
+
|
274
|
+
# Send event to WebSocket clients via event bridge
|
275
|
+
if self.event_bridge:
|
276
|
+
try:
|
277
|
+
logger.debug(f"Forwarding event to WebSocket via bridge: {event_type}")
|
278
|
+
self.event_bridge.add_event(data["event_type"], data["payload"])
|
279
|
+
logger.debug(f"Successfully forwarded event to bridge: {event_type}")
|
280
|
+
except Exception as e:
|
281
|
+
logger.error(f"Error forwarding event to bridge: {e}")
|
282
|
+
logger.error(
|
283
|
+
f"Event data was: {data['event_type']} - {data.get('payload', {}).get('ironic_object.data', {}).get('name', 'unknown')}"
|
284
|
+
)
|
174
285
|
|
175
286
|
if self.osism_api_session:
|
176
287
|
tries = 1
|
@@ -0,0 +1,271 @@
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
2
|
+
|
3
|
+
import asyncio
|
4
|
+
import json
|
5
|
+
import logging
|
6
|
+
from datetime import datetime
|
7
|
+
from typing import Dict, Any, Optional, List
|
8
|
+
from uuid import uuid4
|
9
|
+
|
10
|
+
from fastapi import WebSocket, WebSocketDisconnect
|
11
|
+
|
12
|
+
logger = logging.getLogger("osism.websocket")
|
13
|
+
|
14
|
+
|
15
|
+
class EventMessage:
|
16
|
+
"""Represents an event message for WebSocket streaming."""
|
17
|
+
|
18
|
+
def __init__(
|
19
|
+
self,
|
20
|
+
event_type: str,
|
21
|
+
source: str,
|
22
|
+
data: Dict[str, Any],
|
23
|
+
node_name: Optional[str] = None,
|
24
|
+
):
|
25
|
+
self.id = str(uuid4())
|
26
|
+
self.timestamp = datetime.utcnow().isoformat() + "Z"
|
27
|
+
self.event_type = event_type
|
28
|
+
self.source = source
|
29
|
+
self.node_name = node_name
|
30
|
+
self.data = data
|
31
|
+
|
32
|
+
def to_dict(self) -> Dict[str, Any]:
|
33
|
+
"""Convert event message to dictionary for JSON serialization."""
|
34
|
+
return {
|
35
|
+
"id": self.id,
|
36
|
+
"timestamp": self.timestamp,
|
37
|
+
"event_type": self.event_type,
|
38
|
+
"source": self.source,
|
39
|
+
"node_name": self.node_name,
|
40
|
+
"data": self.data,
|
41
|
+
}
|
42
|
+
|
43
|
+
def to_json(self) -> str:
|
44
|
+
"""Convert event message to JSON string."""
|
45
|
+
return json.dumps(self.to_dict())
|
46
|
+
|
47
|
+
|
48
|
+
class WebSocketConnection:
|
49
|
+
"""Represents a WebSocket connection with filtering options."""
|
50
|
+
|
51
|
+
def __init__(self, websocket: WebSocket):
|
52
|
+
self.websocket = websocket
|
53
|
+
self.event_filters: List[str] = [] # List of event types to filter
|
54
|
+
self.node_filters: List[str] = [] # List of node names to filter
|
55
|
+
self.service_filters: List[str] = [] # List of service types to filter
|
56
|
+
|
57
|
+
def matches_filters(self, event: "EventMessage") -> bool:
|
58
|
+
"""Check if event matches this connection's filters."""
|
59
|
+
# If no filters are set, pass all events
|
60
|
+
if (
|
61
|
+
not self.event_filters
|
62
|
+
and not self.node_filters
|
63
|
+
and not self.service_filters
|
64
|
+
):
|
65
|
+
return True
|
66
|
+
|
67
|
+
# Check event type filters
|
68
|
+
event_match = not self.event_filters or event.event_type in self.event_filters
|
69
|
+
|
70
|
+
# Check node filters
|
71
|
+
node_match = not self.node_filters or (
|
72
|
+
event.node_name is not None and event.node_name in self.node_filters
|
73
|
+
)
|
74
|
+
|
75
|
+
# Check service filters
|
76
|
+
service_type = event.event_type.split(".")[0] if event.event_type else "unknown"
|
77
|
+
service_match = not self.service_filters or service_type in self.service_filters
|
78
|
+
|
79
|
+
return event_match and node_match and service_match
|
80
|
+
|
81
|
+
|
82
|
+
class WebSocketManager:
|
83
|
+
"""Manages WebSocket connections and event broadcasting."""
|
84
|
+
|
85
|
+
def __init__(self):
|
86
|
+
# Store active WebSocket connections with filtering support
|
87
|
+
self.connections: Dict[WebSocket, WebSocketConnection] = {}
|
88
|
+
# Event queue for broadcasting
|
89
|
+
self.event_queue: asyncio.Queue = asyncio.Queue()
|
90
|
+
# Background task for event broadcasting
|
91
|
+
self._broadcaster_task: Optional[asyncio.Task] = None
|
92
|
+
# Lock for thread-safe operations
|
93
|
+
self._lock = asyncio.Lock()
|
94
|
+
|
95
|
+
async def connect(self, websocket: WebSocket) -> None:
|
96
|
+
"""Accept a new WebSocket connection."""
|
97
|
+
await websocket.accept()
|
98
|
+
async with self._lock:
|
99
|
+
self.connections[websocket] = WebSocketConnection(websocket)
|
100
|
+
logger.info(f"WebSocket connected. Total connections: {len(self.connections)}")
|
101
|
+
|
102
|
+
# Start broadcaster if this is the first connection
|
103
|
+
if not self._broadcaster_task or self._broadcaster_task.done():
|
104
|
+
self._broadcaster_task = asyncio.create_task(self._broadcast_events())
|
105
|
+
|
106
|
+
async def disconnect(self, websocket: WebSocket) -> None:
|
107
|
+
"""Remove a WebSocket connection."""
|
108
|
+
async with self._lock:
|
109
|
+
self.connections.pop(websocket, None)
|
110
|
+
logger.info(
|
111
|
+
f"WebSocket disconnected. Total connections: {len(self.connections)}"
|
112
|
+
)
|
113
|
+
|
114
|
+
async def update_filters(
|
115
|
+
self,
|
116
|
+
websocket: WebSocket,
|
117
|
+
event_filters: Optional[List[str]] = None,
|
118
|
+
node_filters: Optional[List[str]] = None,
|
119
|
+
service_filters: Optional[List[str]] = None,
|
120
|
+
) -> None:
|
121
|
+
"""Update filters for a specific WebSocket connection."""
|
122
|
+
async with self._lock:
|
123
|
+
if websocket in self.connections:
|
124
|
+
connection = self.connections[websocket]
|
125
|
+
if event_filters is not None:
|
126
|
+
connection.event_filters = event_filters
|
127
|
+
if node_filters is not None:
|
128
|
+
connection.node_filters = node_filters
|
129
|
+
if service_filters is not None:
|
130
|
+
connection.service_filters = service_filters
|
131
|
+
logger.debug(
|
132
|
+
f"Updated filters for WebSocket: events={connection.event_filters}, "
|
133
|
+
f"nodes={connection.node_filters}, services={connection.service_filters}"
|
134
|
+
)
|
135
|
+
|
136
|
+
async def add_event(self, event: EventMessage) -> None:
|
137
|
+
"""Add an event to the broadcast queue."""
|
138
|
+
await self.event_queue.put(event)
|
139
|
+
|
140
|
+
async def broadcast_event_from_notification(
    self, event_type: str, payload: Dict[str, Any]
) -> None:
    """Create and broadcast an event from RabbitMQ notification.

    Derives the service type from the event-type prefix, pulls the
    service-specific node/resource identifiers out of the payload,
    enriches the payload with that metadata, and queues an
    EventMessage for the WebSocket broadcaster.
    """
    try:
        logger.info(f"Processing event for WebSocket broadcast: {event_type}")
        logger.debug(f"Active WebSocket connections: {len(self.connections)}")

        # First dotted segment of the event type names the emitting service
        service_type = event_type.split(".")[0] if event_type else "unknown"

        def extract_identifiers():
            # Identifiers live under different oslo-versioned-object keys
            # depending on which OpenStack service emitted the event.
            # Returns (node_name, resource_id); both None when unknown.
            if service_type == "baremetal" and "ironic_object.data" in payload:
                data = payload["ironic_object.data"]
                return data.get("name"), data.get("uuid")
            if service_type in ("compute", "nova") and "nova_object.data" in payload:
                data = payload["nova_object.data"]
                return data.get("host") or data.get("name"), data.get("uuid")
            if service_type in ("network", "neutron"):
                # Neutron events may have different payload structures
                if "neutron_object.data" in payload:
                    data = payload["neutron_object.data"]
                    return (
                        data.get("name") or data.get("device_id"),
                        data.get("id") or data.get("uuid"),
                    )
                return None, None
            if service_type == "volume" and "cinder_object.data" in payload:
                data = payload["cinder_object.data"]
                return (
                    data.get("name") or data.get("display_name"),
                    data.get("id") or data.get("uuid"),
                )
            if service_type == "image" and "glance_object.data" in payload:
                data = payload["glance_object.data"]
                return data.get("name"), data.get("id") or data.get("uuid")
            if service_type == "identity" and "keystone_object.data" in payload:
                data = payload["keystone_object.data"]
                return data.get("name"), data.get("id") or data.get("uuid")
            return None, None

        node_name, resource_id = extract_identifiers()

        # Enrich a copy of the payload with routing metadata
        event_data = dict(payload)
        event_data["service_type"] = service_type
        event_data["resource_id"] = resource_id

        event = EventMessage(
            event_type=event_type,
            source="openstack",
            data=event_data,
            node_name=node_name,
        )

        await self.add_event(event)
        logger.info(
            f"Added {service_type} event to WebSocket queue: {event_type} for resource {node_name or resource_id}"
        )
        logger.debug(f"Event queue size: {self.event_queue.qsize()}")

    except Exception as e:
        # Never let a malformed notification take the consumer down
        logger.error(f"Error creating event from notification: {e}")
async def _broadcast_events(self) -> None:
    """Background task to broadcast events to all connected clients.

    Runs until cancelled: pulls events off the queue, fans each one out
    to every connection whose filters match, and prunes connections
    that fail during send.
    """
    logger.info("Starting WebSocket event broadcaster")

    while True:
        try:
            event = await self.event_queue.get()

            # Nobody listening: drop the event and wait for the next one
            if not self.connections:
                continue

            serialized = event.to_json()
            stale = set()
            delivered = 0

            # Snapshot under the lock, send outside it so a slow client
            # cannot block connect/disconnect bookkeeping
            async with self._lock:
                snapshot = dict(self.connections)

            for websocket, connection in snapshot.items():
                try:
                    if connection.matches_filters(event):
                        await websocket.send_text(serialized)
                        delivered += 1
                except WebSocketDisconnect:
                    stale.add(websocket)
                except Exception as e:
                    logger.error(f"Error sending message to WebSocket: {e}")
                    stale.add(websocket)

            # Drop every connection that failed during this broadcast
            if stale:
                async with self._lock:
                    for websocket in stale:
                        self.connections.pop(websocket, None)
                logger.info(
                    f"Removed {len(stale)} disconnected WebSocket(s). "
                    f"Active connections: {len(self.connections)}"
                )

            logger.info(
                f"Broadcasted event {event.event_type} to {delivered}/{len(self.connections)} connection(s)"
            )

        except asyncio.CancelledError:
            logger.info("WebSocket broadcaster task cancelled")
            break
        except Exception as e:
            # Keep the broadcaster alive after unexpected errors
            logger.error(f"Error in WebSocket broadcaster: {e}")
async def send_heartbeat(self) -> None:
    """Send heartbeat to all connected clients.

    Skipped entirely when no client is connected.
    """
    if not self.connections:
        return

    ping = EventMessage(
        event_type="heartbeat", source="osism", data={"message": "ping"}
    )
    await self.add_event(ping)
270
|
+
# Module-level singleton shared by the API and the notification listener
websocket_manager = WebSocketManager()
|
osism/tasks/openstack.py
CHANGED
@@ -84,20 +84,21 @@ def get_baremetal_nodes():
|
|
84
84
|
# Convert generator to list and extract relevant fields
|
85
85
|
node_list = []
|
86
86
|
for node in nodes:
|
87
|
+
# OpenStack SDK returns Resource objects, not dicts - use attribute access
|
87
88
|
node_info = {
|
88
|
-
"uuid": node
|
89
|
-
"name": node
|
90
|
-
"power_state": node
|
91
|
-
"provision_state": node
|
92
|
-
"maintenance": node
|
93
|
-
"instance_uuid": node
|
94
|
-
"driver": node
|
95
|
-
"resource_class": node
|
96
|
-
"properties": node
|
97
|
-
"extra": node
|
98
|
-
"last_error": node
|
99
|
-
"created_at": node
|
100
|
-
"updated_at": node
|
89
|
+
"uuid": getattr(node, "uuid", None) or getattr(node, "id", None),
|
90
|
+
"name": getattr(node, "name", None),
|
91
|
+
"power_state": getattr(node, "power_state", None),
|
92
|
+
"provision_state": getattr(node, "provision_state", None),
|
93
|
+
"maintenance": getattr(node, "maintenance", None),
|
94
|
+
"instance_uuid": getattr(node, "instance_uuid", None),
|
95
|
+
"driver": getattr(node, "driver", None),
|
96
|
+
"resource_class": getattr(node, "resource_class", None),
|
97
|
+
"properties": getattr(node, "properties", {}),
|
98
|
+
"extra": getattr(node, "extra", {}),
|
99
|
+
"last_error": getattr(node, "last_error", None),
|
100
|
+
"created_at": getattr(node, "created_at", None),
|
101
|
+
"updated_at": getattr(node, "updated_at", None),
|
101
102
|
}
|
102
103
|
node_list.append(node_info)
|
103
104
|
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: osism
|
3
|
-
Version: 0.
|
3
|
+
Version: 0.20250824.1
|
4
4
|
Summary: OSISM manager interface
|
5
5
|
Home-page: https://github.com/osism/python-osism
|
6
6
|
Author: OSISM GmbH
|
@@ -56,6 +56,7 @@ Requires-Dist: transitions==0.9.3
|
|
56
56
|
Requires-Dist: uvicorn[standard]==0.35.0
|
57
57
|
Requires-Dist: validators==0.35.0
|
58
58
|
Requires-Dist: watchdog==6.0.0
|
59
|
+
Requires-Dist: websockets==15.0.1
|
59
60
|
Provides-Extra: ansible
|
60
61
|
Requires-Dist: ansible-runner==2.4.1; extra == "ansible"
|
61
62
|
Requires-Dist: ansible-core==2.19.0; extra == "ansible"
|
@@ -1,6 +1,6 @@
|
|
1
1
|
osism/__init__.py,sha256=1UiNTBus0V0f2AbZQzAtVtu6zkfCCrw0OTq--NwFAqY,341
|
2
2
|
osism/__main__.py,sha256=ILe4gu61xEISiBsxanqTQIdSkV-YhpZXTRlguCYyssk,141
|
3
|
-
osism/api.py,sha256=
|
3
|
+
osism/api.py,sha256=wVPLhPPdBcFx6vss5pX1Y2JxbaNSMh_Bqo4AB529QWI,16945
|
4
4
|
osism/main.py,sha256=Dt2-9sLXcS-Ny4DAz7hrha-KRc7zd7BFUTRdfs_X8z4,893
|
5
5
|
osism/settings.py,sha256=VZT1muZVYWM5Ov1eFRC7a4ZGYIdI2AFmudCm0wZ1C2Q,1898
|
6
6
|
osism/commands/__init__.py,sha256=Ag4wX_DCgXRdoLn6t069jqb3DdRylsX2nyYkiyCx4uk,456
|
@@ -34,7 +34,9 @@ osism/data/__init__.py,sha256=izXdh0J3vPLQI7kBhJI7ibJQzPqU_nlONP0L4Cf_k6A,1504
|
|
34
34
|
osism/data/enums.py,sha256=gItIjOK6xWuOZSkMxpMdYLRyt4ezyhzkqA7BGiah2o0,10030
|
35
35
|
osism/data/playbooks.py,sha256=M3T3ajV-8Lt-orsRO3jAoukhaoYFr4EZ2dzYXQjt1kg,728
|
36
36
|
osism/services/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
|
37
|
-
osism/services/
|
37
|
+
osism/services/event_bridge.py,sha256=roV90o9UgTnwoVbXnPR3muBk04IriVYCO_fewZ46Mq8,12016
|
38
|
+
osism/services/listener.py,sha256=O8Xq5fEEVoNFIgFPE7GqfVqx6C4QkdWhUPUGzODFnws,14211
|
39
|
+
osism/services/websocket_manager.py,sha256=F147kWOg8PAvbVG4aVYQVtK4mFMfPVtHxxYJXaqiAjg,11051
|
38
40
|
osism/tasks/__init__.py,sha256=iAUs-ttUMw1nZElL631sT1ke29RvTjQjlhWPl_kGrEw,9003
|
39
41
|
osism/tasks/ansible.py,sha256=-gUe6uZFhPLI3DGZHlpQlghuDKpp4Drn5IEctlV6Sv8,1300
|
40
42
|
osism/tasks/ceph.py,sha256=eIQkah3Kj4INtOkF9kTjHbXJ3_J2lg48EWJKfHc-UYw,615
|
@@ -42,7 +44,7 @@ osism/tasks/conductor.py,sha256=WBLsoPtr0iGUzRGERs0Xt7CMYrnHQVEwNV9qXBssI3s,274
|
|
42
44
|
osism/tasks/kolla.py,sha256=wJQpWn_01iWLkr7l7T7RNrQGfRgsgmYi4WQlTmNGvew,618
|
43
45
|
osism/tasks/kubernetes.py,sha256=VzXq_VrYU_CLm4cOruqnE3Kq2ydfO9glZ3p0bp3OYoc,625
|
44
46
|
osism/tasks/netbox.py,sha256=QGQGz3s0V8WvPvhEJWwo0H24aLFaZrSl-voN-axzRwY,5846
|
45
|
-
osism/tasks/openstack.py,sha256=
|
47
|
+
osism/tasks/openstack.py,sha256=v9kkwKIr9nsedUgSQYSDW0kZBAGoE9MjKithXXeRm_I,7385
|
46
48
|
osism/tasks/reconciler.py,sha256=PnGWfvfmomzbgddvyCdxul-z5ZLXxWAmrQyRCN874-s,1958
|
47
49
|
osism/tasks/conductor/__init__.py,sha256=eAiaM69sVbTTDam7gCLyjF7wBCt7rd__pRFu7VdY-f8,1930
|
48
50
|
osism/tasks/conductor/config.py,sha256=n1H9_8DY90p5E4mygzKyJUl8G3WdDuGHFTp-SrmZmgU,4543
|
@@ -62,11 +64,11 @@ osism/tasks/conductor/sonic/interface.py,sha256=M876LHdFqGxUfTizzDusdzvCkDI0vCgq
|
|
62
64
|
osism/tasks/conductor/sonic/sync.py,sha256=fpgsQVwq6Hb7eeDHhLkAqx5BkaK3Ce_m_WvmWEsJyOo,9182
|
63
65
|
osism/utils/__init__.py,sha256=370UHVU5BFy-1wDAxBFaRjSA-zR0KNadJPWQ6zcYRf0,7806
|
64
66
|
osism/utils/ssh.py,sha256=nxeEgwjJWvQCybKDp-NelMeWyODCYpaXFCBchAv4-bg,8691
|
65
|
-
osism-0.
|
66
|
-
osism-0.
|
67
|
-
osism-0.
|
68
|
-
osism-0.
|
69
|
-
osism-0.
|
70
|
-
osism-0.
|
71
|
-
osism-0.
|
72
|
-
osism-0.
|
67
|
+
osism-0.20250824.1.dist-info/licenses/AUTHORS,sha256=oWotd63qsnNR945QLJP9mEXaXNtCMaesfo8ZNuLjwpU,39
|
68
|
+
osism-0.20250824.1.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
|
69
|
+
osism-0.20250824.1.dist-info/METADATA,sha256=euXrln8TtOYB_fMaByv11BY0sNMaN3mjtSt_IYMvevA,2971
|
70
|
+
osism-0.20250824.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
71
|
+
osism-0.20250824.1.dist-info/entry_points.txt,sha256=h9YS3gfPc5ueU9ZXtCc60e8p4NQEuvtIH_zE0cfVqy0,4439
|
72
|
+
osism-0.20250824.1.dist-info/pbr.json,sha256=9hAzFUnge6s4hnmBQ2eKo9TQlUJoLcBNpoC0DfrZRjo,47
|
73
|
+
osism-0.20250824.1.dist-info/top_level.txt,sha256=8L8dsI9hcaGHsdnR4k_LN9EM78EhwrXRFHyAryPXZtY,6
|
74
|
+
osism-0.20250824.1.dist-info/RECORD,,
|
@@ -0,0 +1 @@
|
|
1
|
+
Christian Berendt <berendt@osism.tech>
|
@@ -0,0 +1 @@
|
|
1
|
+
{"git_version": "2d93840", "is_release": false}
|
@@ -1 +0,0 @@
|
|
1
|
-
renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
|
@@ -1 +0,0 @@
|
|
1
|
-
{"git_version": "eaafb18", "is_release": false}
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|