cgse-core 0.17.2__py3-none-any.whl → 0.17.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,35 +0,0 @@
1
import multiprocessing

# BUG FIX: zmq.POLLIN is used in AsyncConfigurationManager.__init__ below,
# but zmq was never imported in this module, causing a NameError at startup.
import zmq

from egse.async_control import AsyncControlServer
from egse.confman import AsyncConfigurationManagerProtocol
from egse.log import logging
from egse.settings import Settings

logger = logging.getLogger("egse.confman")

# Settings group for this control server, loaded from the project configuration.
settings = Settings.load("Configuration Manager Control Server")

# Process name under which this control server registers itself.
PROCESS_NAME = "cm_acs"
15
class AsyncConfigurationManager(AsyncControlServer):
    """Asynchronous control server for the Configuration Manager.

    Renames the current process, wires the configuration-manager protocol
    onto the device command socket, and enables periodic housekeeping.
    """

    def __init__(self):
        # Rename the process so it is identifiable in process listings.
        multiprocessing.current_process().name = PROCESS_NAME

        super().__init__()

        self.logger = logger
        self.service_type = settings.SERVICE_TYPE
        self.service_name = PROCESS_NAME

        # The protocol object (de)serialises commands exchanged over the
        # device command socket.
        self.device_protocol = AsyncConfigurationManagerProtocol(self)

        bind_address = self.device_protocol.get_bind_address()
        self.logger.debug(f"Binding ZeroMQ socket to {bind_address}")

        self.device_protocol.bind(self.dev_ctrl_cmd_sock)
        self.poller.register(self.dev_ctrl_cmd_sock, zmq.POLLIN)

        self.set_hk_delay(10.0)

        # NOTE(review): the message below divides hk_delay by 1000, which assumes
        # hk_delay is in milliseconds, while set_hk_delay() was given 10.0 —
        # confirm the unit against AsyncControlServer, otherwise this prints 0.0.
        self.logger.info(f"CM housekeeping saved every {self.hk_delay / 1000:.1f} seconds.")
File without changes
egse/metricshub/server.py DELETED
@@ -1,271 +0,0 @@
1
- # async_gateway.py
2
- import asyncio
3
- import time
4
- from datetime import datetime
5
-
6
- import zmq
7
- import zmq.asyncio
8
- from influxdb_client_3 import InfluxDBClient3
9
- from influxdb_client_3 import Point
10
-
11
-
12
- class AsyncMetricsHub:
13
- def __init__(
14
- self,
15
- influx_client, # FIXME: should be a plugin
16
- zmq_endpoint="tcp://*:5555", # FIXME: should also come from a plugin
17
- batch_size=500,
18
- flush_interval=2.0,
19
- ):
20
- self.influx_client = influx_client
21
- self.batch_size = batch_size
22
- self.flush_interval = flush_interval
23
-
24
- # AsyncIO ZeroMQ context
25
- self.context = zmq.asyncio.Context()
26
- self.socket = self.context.socket(zmq.PULL)
27
- self.socket.bind(zmq_endpoint)
28
-
29
- # Configure socket
30
- self.socket.setsockopt(zmq.RCVHWM, 10000)
31
-
32
- # AsyncIO queue for data points
33
- self.data_queue = asyncio.Queue(maxsize=10000)
34
-
35
- # Stats
36
- self.stats = {"received": 0, "written": 0, "errors": 0, "queue_size": 0}
37
-
38
- print(f"🚀 AsyncIO Gateway listening on {zmq_endpoint}")
39
-
40
- async def start(self):
41
- """Start the gateway with concurrent tasks"""
42
- try:
43
- # Run receiver and batch processor concurrently
44
- await asyncio.gather(
45
- self._receive_data(), self._batch_processor(), self._stats_reporter(), return_exceptions=True
46
- )
47
- except Exception as e:
48
- print(f"❌ Gateway error: {e}")
49
- finally:
50
- await self._cleanup()
51
-
52
- async def _receive_data(self):
53
- """Receive data from ZeroMQ and add to queue"""
54
- print("📡 Starting data receiver...")
55
-
56
- while True:
57
- try:
58
- # Receive message (non-blocking with timeout)
59
- message = await self.socket.recv_json()
60
-
61
- # Process message
62
- point = await self._process_message(message)
63
- if point:
64
- # Add to queue (non-blocking)
65
- try:
66
- await asyncio.wait_for(self.data_queue.put(point), timeout=0.1)
67
- self.stats["received"] += 1
68
- except asyncio.TimeoutError:
69
- # Queue full, drop point
70
- self.stats["errors"] += 1
71
- print("⚠️ Queue full, dropping point")
72
-
73
- except Exception as e:
74
- print(f"❌ Error receiving data: {e}")
75
- self.stats["errors"] += 1
76
- await asyncio.sleep(0.1)
77
-
78
- async def _process_message(self, message):
79
- """Process incoming message asynchronously"""
80
- try:
81
- # Validate message
82
- if "device_id" not in message or "data" not in message:
83
- return None
84
-
85
- # Create InfluxDB point
86
- point = Point("sensor_data")
87
- point.tag("device_id", message["device_id"])
88
-
89
- # Add timestamp
90
- if "timestamp" in message:
91
- point.time(message["timestamp"])
92
-
93
- # Add sensor data
94
- sensor_data = message["data"]
95
- for key, value in sensor_data.items():
96
- if isinstance(value, (int, float)):
97
- point.field(key, value)
98
- else:
99
- point.tag(key, str(value))
100
-
101
- return point
102
-
103
- except Exception as e:
104
- print(f"❌ Error processing message: {e}")
105
- return None
106
-
107
- async def _batch_processor(self):
108
- """Batch processor using asyncio"""
109
- print("📦 Starting batch processor...")
110
-
111
- batch = []
112
- last_flush = time.time()
113
-
114
- while True:
115
- try:
116
- # Try to get data points for batch
117
- timeout = max(0.1, self.flush_interval - (time.time() - last_flush))
118
-
119
- try:
120
- # Get point with timeout
121
- point = await asyncio.wait_for(self.data_queue.get(), timeout=timeout)
122
- batch.append(point)
123
-
124
- except asyncio.TimeoutError:
125
- # No data received, check if we should flush anyway
126
- pass
127
-
128
- # Update queue size stat
129
- self.stats["queue_size"] = self.data_queue.qsize()
130
-
131
- # Check flush conditions
132
- should_flush = len(batch) >= self.batch_size or (
133
- batch and time.time() - last_flush >= self.flush_interval
134
- )
135
-
136
- if should_flush and batch:
137
- await self._flush_batch(batch)
138
- batch.clear()
139
- last_flush = time.time()
140
-
141
- except Exception as e:
142
- print(f"❌ Batch processor error: {e}")
143
- await asyncio.sleep(1)
144
-
145
- async def _flush_batch(self, batch):
146
- """Flush batch to InfluxDB asynchronously"""
147
- if not batch:
148
- return
149
-
150
- try:
151
- start_time = time.time()
152
-
153
- # Run InfluxDB write in thread pool to avoid blocking
154
- loop = asyncio.get_event_loop()
155
- await loop.run_in_executor(None, self.influx_client.write, batch)
156
-
157
- write_time = time.time() - start_time
158
- self.stats["written"] += len(batch)
159
-
160
- points_per_second = len(batch) / write_time if write_time > 0 else 0
161
- print(f"✅ Wrote {len(batch)} points ({points_per_second:.0f} pts/s)")
162
-
163
- except Exception as e:
164
- print(f"❌ Failed to write batch: {e}")
165
- self.stats["errors"] += len(batch)
166
-
167
- async def _stats_reporter(self):
168
- """Periodic stats reporting"""
169
- while True:
170
- await asyncio.sleep(10) # Report every 10 seconds
171
- print(
172
- f"📊 Stats: Received={self.stats['received']}, "
173
- f"Written={self.stats['written']}, "
174
- f"Queue={self.stats['queue_size']}, "
175
- f"Errors={self.stats['errors']}"
176
- )
177
-
178
- async def _cleanup(self):
179
- """Cleanup resources"""
180
- print("🧹 Cleaning up...")
181
-
182
- # Flush remaining data
183
- remaining_batch = []
184
- while not self.data_queue.empty():
185
- try:
186
- point = await asyncio.wait_for(self.data_queue.get(), timeout=0.1)
187
- remaining_batch.append(point)
188
- except asyncio.TimeoutError:
189
- break
190
-
191
- if remaining_batch:
192
- await self._flush_batch(remaining_batch)
193
-
194
- self.socket.close()
195
- self.context.term()
196
- print("✅ Cleanup complete")
197
-
198
-
199
# Device process (also async)
class AsyncDeviceDataSender:
    """Pushes JSON-encoded sensor readings to the metrics hub over ZeroMQ."""

    def __init__(self, gateway_endpoint="tcp://localhost:5555", device_id="device_001"):
        self.device_id = device_id
        self.context = zmq.asyncio.Context()
        self.socket = self.context.socket(zmq.PUSH)
        self.socket.connect(gateway_endpoint)
        # Drop pending messages on close, and bound the outgoing queue.
        self.socket.setsockopt(zmq.LINGER, 0)
        self.socket.setsockopt(zmq.SNDHWM, 1000)

    async def send_data_point(self, sensor_data):
        """Send one reading; return True on success, False when the send queue is full."""
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12 and
        # yields a naive timestamp — consider datetime.now(timezone.utc).
        payload = {
            "device_id": self.device_id,
            "timestamp": datetime.utcnow().isoformat(),
            "data": sensor_data,
        }

        try:
            await self.socket.send_json(payload, flags=zmq.NOBLOCK)
        except zmq.Again:
            # High-water mark reached: the gateway is not keeping up.
            return False
        return True

    async def close(self):
        """Close the socket and terminate the ZeroMQ context."""
        self.socket.close()
        self.context.term()
224
-
225
# Usage examples
async def device_data_loop():
    """Example device data collection loop.

    Sends a mock sensor reading to the gateway at ~10 Hz until interrupted,
    then closes the sender.
    """
    sender = AsyncDeviceDataSender(device_id="sensor_001")

    try:
        while True:
            # Your sensor reading logic here
            sensor_data = {
                "temperature": 23.5 + (time.time() % 10),  # Mock data
                "humidity": 45.0 + (time.time() % 5),
                "pressure": 1013.25,
            }

            success = await sender.send_data_point(sensor_data)
            if not success:
                print("⚠️ Failed to send data point")

            await asyncio.sleep(0.1)  # 10Hz

    # NOTE(review): KeyboardInterrupt is normally raised in the event loop,
    # not inside this coroutine — confirm this handler is ever reached.
    except KeyboardInterrupt:
        print("🛑 Stopping device loop...")
    finally:
        await sender.close()
250
-
251
async def run_gateway():
    """Run the gateway service.

    Builds the InfluxDB client and starts the metrics hub; runs until the
    hub's start() coroutine returns.
    """
    # Initialize InfluxDB client (placeholder credentials).
    influx_client = InfluxDBClient3(host="your-influxdb-host", token="your-token", database="your-database")

    # BUG FIX: the original instantiated `AsyncInfluxGateway`, a name that is
    # not defined anywhere in this module (NameError at runtime). The gateway
    # class defined above is AsyncMetricsHub.
    gateway = AsyncMetricsHub(influx_client=influx_client, batch_size=250, flush_interval=1.0)

    await gateway.start()
260
-
261
# Main execution
if __name__ == "__main__":
    try:
        # Run gateway (blocks until the gateway stops or is interrupted).
        asyncio.run(run_gateway())

        # Or run device simulator
        # asyncio.run(device_data_loop())

    except KeyboardInterrupt:
        # Ctrl-C from asyncio.run propagates here for a clean exit message.
        print("👋 Goodbye!")
egse/notifyhub/test.py DELETED
@@ -1,303 +0,0 @@
1
- import asyncio
2
- import logging
3
- import random
4
- import time
5
- import uuid
6
-
7
- import pytest
8
-
9
- from .services import ServiceMessaging
10
-
11
-
12
class UserService:
    """Owns user records and publishes user lifecycle events."""

    def __init__(self):
        # Compose with messaging capability
        self.messaging = ServiceMessaging("user-service")
        self.users = {}  # Simple in-memory store keyed by user id
        self.logger = logging.getLogger("egse.user-service")

    async def create_user(self, email: str, name: str) -> str:
        """Create a user, publish a 'user_created' event, and return the new id."""
        new_id = str(uuid.uuid4())

        # Business logic: store the record in the in-memory map.
        record = {
            "id": new_id,
            "email": email,
            "name": name,
            "created_at": time.time(),
        }
        self.users[new_id] = record

        self.logger.info(f"Created user: {name} ({email})")

        # Let interested services (email, analytics, ...) know.
        await self.messaging.publish_event(
            "user_created", {"user_id": new_id, "email": email, "name": name}
        )

        return new_id

    async def update_user(self, user_id: str, updates: dict):
        """Apply *updates* to an existing user and publish a 'user_updated' event.

        Raises ValueError when the user id is unknown.
        """
        if user_id not in self.users:
            raise ValueError("User not found")

        self.users[user_id].update(updates)

        await self.messaging.publish_event("user_updated", {"user_id": user_id, "updates": updates})

    async def close(self):
        """Disconnect the underlying messaging channel."""
        await self.messaging.disconnect()
48
-
49
class EmailService:
    """Listens for domain events and sends the corresponding emails."""

    def __init__(self):
        # Compose with messaging capability - subscribe to events we care about
        self.messaging = ServiceMessaging(
            "email-service", subscriptions=["user_created", "user_updated", "order_placed"]
        )

        # Map each event type onto its handler coroutine.
        self.messaging.register_handler("user_created", self.handle_user_created)
        self.messaging.register_handler("user_updated", self.handle_user_updated)
        self.messaging.register_handler("order_placed", self.handle_order_placed)

        self.logger = logging.getLogger("egse.email-service")

    async def handle_user_created(self, event_data: dict):
        """Send a welcome email to a freshly created user."""
        payload = event_data["data"]

        self.logger.info(f"Sending welcome email to {payload['name']}")

        # Simulate email sending
        await self._send_email(
            to=payload["email"],
            subject="Welcome!",
            template="welcome",
            data={"name": payload["name"]},
        )

    async def handle_user_updated(self, event_data: dict):
        """Log that a user record changed."""
        user_id = event_data["data"]["user_id"]

        self.logger.info(f"User {user_id} was updated")
        # Could send update notification email

    async def handle_order_placed(self, event_data: dict):
        """Send an order confirmation to the customer."""
        order = event_data["data"]

        await self._send_email(
            to=order["customer_email"],
            subject="Order Confirmation",
            template="order_confirmation",
            data={"order_id": order["order_id"]},
        )

    async def _send_email(self, to: str, subject: str, template: str, data: dict):
        """Simulate sending email"""
        await asyncio.sleep(0.1)  # Simulate network delay
        self.logger.info(f"✉️ Sent '{subject}' to {to}")

    async def start(self):
        """Start listening for events"""
        await self.messaging.start_listening()

    async def close(self):
        """Disconnect the underlying messaging channel."""
        await self.messaging.disconnect()
104
-
105
# Order Service - both publishes and subscribes
class OrderService:
    """Owns orders; publishes order events and tracks newly created users."""

    def __init__(self):
        # This service both publishes and subscribes
        self.messaging = ServiceMessaging(
            "order-service",
            subscriptions=["user_created"],  # Maybe we want to track new users
        )
        self.messaging.register_handler("user_created", self.handle_new_user)

        self.orders = {}
        self.user_orders = {}  # Track orders per user
        self.logger = logging.getLogger("egse.order-service")

    async def handle_new_user(self, event_data: dict):
        """Start tracking orders for a newly created user."""
        new_user_id = event_data["data"]["user_id"]

        self.user_orders[new_user_id] = []
        self.logger.info(f"Tracking new user: {new_user_id}")

    async def place_order(self, user_id: str, items: list[dict], customer_email: str):
        """Store an order, publish an 'order_placed' event, and return the order id."""
        order_id = str(uuid.uuid4())

        # Business logic: total is the sum of price x quantity over all items.
        grand_total = sum(item["price"] * item["quantity"] for item in items)
        order_data = {
            "id": order_id,
            "user_id": user_id,
            "items": items,
            "customer_email": customer_email,
            "total": grand_total,
            "created_at": time.time(),
        }
        self.orders[order_id] = order_data

        # Only users seen via 'user_created' have a per-user order list.
        if user_id in self.user_orders:
            self.user_orders[user_id].append(order_id)

        self.logger.info(f"Order placed: {order_id} for user {user_id}")

        await self.messaging.publish_event(
            "order_placed",
            {
                "order_id": order_id,
                "user_id": user_id,
                "customer_email": customer_email,
                "total": grand_total,
            },
        )

        return order_id

    async def start(self):
        """Start listening for events"""
        await self.messaging.start_listening()

    async def close(self):
        """Disconnect the underlying messaging channel."""
        await self.messaging.disconnect()
165
-
166
# Analytics Service - subscribes to multiple events
class AnalyticsService:
    """Aggregates simple business metrics from published events."""

    def __init__(self):
        self.messaging = ServiceMessaging(
            "analytics-service", subscriptions=["user_created", "order_placed", "user_updated"]
        )

        # Map each event type onto its tracking coroutine.
        self.messaging.register_handler("user_created", self.track_user_signup)
        self.messaging.register_handler("order_placed", self.track_order)
        self.messaging.register_handler("user_updated", self.track_user_update)

        # Running counters; total_revenue is the sum of all order totals seen.
        self.metrics = {"users_created": 0, "orders_placed": 0, "total_revenue": 0.0}
        self.logger = logging.getLogger("egse.analytics-service")

    async def track_user_signup(self, event_data: dict):
        """Count one user signup."""
        self.metrics["users_created"] += 1
        self.logger.info(f"📊 Users created: {self.metrics['users_created']}")

    async def track_order(self, event_data: dict):
        """Count one order and accumulate its revenue."""
        amount = event_data["data"]["total"]

        self.metrics["orders_placed"] += 1
        self.metrics["total_revenue"] += amount

        self.logger.info(f"📊 Orders: {self.metrics['orders_placed']}, Revenue: ${self.metrics['total_revenue']:.2f}")

    async def track_user_update(self, event_data: dict):
        """Log that a user-update event was observed."""
        self.logger.info("📊 User update tracked")

    async def start(self):
        """Start listening for events."""
        await self.messaging.start_listening()

    async def close(self):
        """Disconnect the underlying messaging channel."""
        await self.messaging.disconnect()
206
-
207
async def run_microservices_demo():
    """Run the complete microservices system.

    Starts the subscriber services as background tasks, drives a demo
    workflow (users, orders, a stress burst), then cancels everything.
    """

    # Create services
    user_service = UserService()
    email_service = EmailService()
    order_service = OrderService()
    analytics_service = AnalyticsService()

    # Start subscriber services as long-running background tasks.
    tasks = [
        asyncio.create_task(email_service.start()),
        asyncio.create_task(order_service.start()),
        asyncio.create_task(analytics_service.start()),
    ]

    # Demo scenario
    async def demo_workflow():
        await asyncio.sleep(1)  # Let services connect

        # Create some users
        user1_id = await user_service.create_user("alice@example.com", "Alice Smith")
        user2_id = await user_service.create_user("bob@example.com", "Bob Jones")

        await asyncio.sleep(0.5)

        # Place some orders
        await order_service.place_order(
            user1_id, [{"name": "Widget", "price": 29.99, "quantity": 2}], "alice@example.com"
        )

        await order_service.place_order(
            user2_id, [{"name": "Gadget", "price": 19.99, "quantity": 1}], "bob@example.com"
        )

        await asyncio.sleep(0.5)

        # Update a user
        await user_service.update_user(user1_id, {"name": "Alice Johnson"})

        await asyncio.sleep(2)

        # Stress test, send hundreds of notifications

        for x in range(100):
            # Place some orders
            await order_service.place_order(
                user1_id,
                [{"name": "Widget", "price": 29.99 + x * 3.14, "quantity": random.choice([1, 2, 3, 4])}],
                "alice@example.com",
            )

            await order_service.place_order(
                user2_id,
                [{"name": "Gadget", "price": 19.99 + x * 1.23, "quantity": random.choice([1, 2, 3, 4])}],
                "bob@example.com",
            )

        # Cleanup
        await user_service.close()
        await email_service.close()
        await order_service.close()
        await analytics_service.close()

    async def _cleanup_running_tasks():
        # Cancel all running tasks
        for task in tasks:
            if not task.done():
                print(f"Cancelling task {task.get_name()}.")
                task.cancel()

        # Wait for tasks to complete their cancellation
        if tasks:
            try:
                await asyncio.gather(*tasks, return_exceptions=True)
            except asyncio.CancelledError as exc:
                print(f"Caught {type(exc).__name__}: {exc}.")
                pass

    tasks.append(asyncio.create_task(demo_workflow()))

    # NOTE(review): the subscriber tasks presumably run until cancelled, so
    # this gather only returns via an exception; also KeyboardInterrupt is
    # usually raised at the asyncio.run() level, not here — confirm both.
    try:
        await asyncio.gather(*tasks)
    except KeyboardInterrupt:
        print("Shutting down...")

    await _cleanup_running_tasks()
295
-
296
# BUG FIX: @pytest.mark.asyncio was applied to a synchronous function, which
# pytest-asyncio rejects ("marked with asyncio but is not async"). The
# placeholder test must be a coroutine function.
@pytest.mark.asyncio
async def test_notify_hub(): ...
298
-
299
-
300
if __name__ == "__main__":
    from egse.logger import setup_logging

    # BUG FIX: setup_logging was imported but never called, so the demo ran
    # without the project's logging configuration (service loggers would use
    # whatever default handlers exist). Initialise logging before the demo.
    setup_logging()

    asyncio.run(run_microservices_demo())