nebu 0.1.19__py3-none-any.whl → 0.1.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nebu/processors/consumer.py +82 -18
- {nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/METADATA +1 -1
- {nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/RECORD +6 -6
- {nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/WHEEL +0 -0
- {nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/licenses/LICENSE +0 -0
- {nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/top_level.txt +0 -0
nebu/processors/consumer.py
CHANGED
@@ -10,6 +10,7 @@ from typing import Dict, TypeVar
 
 import redis
 import socks
+from redis import ConnectionError, ResponseError
 
 # Define TypeVar for generic models
 T = TypeVar("T")
@@ -245,9 +246,12 @@ except Exception as e:
 
 # Create consumer group if it doesn't exist
 try:
+    # Assert types before use
+    assert isinstance(REDIS_STREAM, str)
+    assert isinstance(REDIS_CONSUMER_GROUP, str)
     r.xgroup_create(REDIS_STREAM, REDIS_CONSUMER_GROUP, id="0", mkstream=True)
     print(f"Created consumer group {REDIS_CONSUMER_GROUP} for stream {REDIS_STREAM}")
-except
+except ResponseError as e:
     if "BUSYGROUP" in str(e):
         print(f"Consumer group {REDIS_CONSUMER_GROUP} already exists")
     else:
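For reference, the change above is the usual BUSYGROUP-tolerant group creation in redis-py: call xgroup_create with mkstream=True and treat the "group already exists" error as benign. A minimal, self-contained sketch, assuming a reachable Redis instance; the stream and group names are placeholders, not nebu's real configuration:

import redis
from redis import ResponseError

r = redis.Redis(decode_responses=True)
stream, group = "example:stream", "example-group"  # placeholder names

try:
    # mkstream=True creates the stream if it does not exist yet
    r.xgroup_create(stream, group, id="0", mkstream=True)
    print(f"Created consumer group {group} for stream {stream}")
except ResponseError as e:
    # Re-running the consumer raises BUSYGROUP once the group exists; treat that as benign
    if "BUSYGROUP" in str(e):
        print(f"Consumer group {group} already exists")
    else:
        raise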
@@ -261,26 +265,38 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
     return_stream = None
     user_id = None
 
+    print(f"Message data inner: {message_data}")
+
     try:
-        #
-
-
-
-
+        # Extract the JSON string payload from the b'data' field
+        # Redis keys/fields might be bytes even with decode_responses=True
+        payload_str = message_data.get(b"data")
+
+        # decode_responses=True should handle decoding, but Redis can be tricky.
+        # If errors persist, we might need to re-add explicit decode checks.
+        if not payload_str or not isinstance(payload_str, str):
+            # Add a more specific check if needed later based on runtime errors
+            raise ValueError(
+                f"Missing or invalid 'data' field (expected string): {message_data}"
+            )
 
+        # Parse the JSON string into a dictionary
         try:
-
-
-
-
-
-
+            raw_payload = json.loads(payload_str)
+        except json.JSONDecodeError as json_err:
+            raise ValueError(f"Failed to parse JSON payload: {json_err}") from json_err
+
+        # Validate that raw_payload is a dictionary as expected
+        if not isinstance(raw_payload, dict):
+            raise TypeError(
+                f"Expected parsed payload to be a dictionary, but got {type(raw_payload)}"
+            )
 
         print(f"Raw payload: {raw_payload}")
 
         # Extract fields from the parsed payload
         # These fields are extracted for completeness and potential future use
-
+        kind = raw_payload.get("kind", "")  # kind
         msg_id = raw_payload.get("id", "")  # msg_id
         content_raw = raw_payload.get("content", {})
         created_at = raw_payload.get("created_at", 0)  # created_at
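The parsing block added above reads as a small standalone helper: pull the 'data' field, parse it as JSON, and fail loudly if either step yields something unexpected. A sketch of that logic under the message shape shown in the diff; the helper name is illustrative and not part of nebu's API, and the extra str-key lookup is an assumption for when decode_responses already decoded the field names:

import json
from typing import Any, Dict

def parse_stream_payload(message_data: Dict[Any, Any]) -> dict:
    # The field key may arrive as bytes or str depending on client configuration
    payload_str = message_data.get(b"data") or message_data.get("data")
    if not payload_str or not isinstance(payload_str, str):
        raise ValueError(f"Missing or invalid 'data' field (expected string): {message_data}")
    try:
        raw_payload = json.loads(payload_str)
    except json.JSONDecodeError as json_err:
        raise ValueError(f"Failed to parse JSON payload: {json_err}") from json_err
    if not isinstance(raw_payload, dict):
        raise TypeError(f"Expected parsed payload to be a dictionary, but got {type(raw_payload)}")
    return raw_payload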
@@ -290,6 +306,31 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
         handle = raw_payload.get("handle")  # handle
         adapter = raw_payload.get("adapter")  # adapter
 
+        # --- Health Check Logic based on kind ---
+        if kind == "HealthCheck":
+            print(f"Received HealthCheck message {message_id.decode('utf-8')}")
+            health_response = {
+                "kind": "StreamResponseMessage",  # Respond with a standard message kind
+                "id": message_id.decode("utf-8"),
+                "content": {"status": "healthy", "checked_message_id": msg_id},
+                "status": "success",
+                "created_at": datetime.now().isoformat(),
+                "user_id": user_id,  # Include user_id if available
+            }
+            if return_stream:
+                # Assert type again closer to usage for type checker clarity
+                assert isinstance(return_stream, str)
+                r.xadd(return_stream, {"data": json.dumps(health_response)})
+                print(f"Sent health check response to {return_stream}")
+
+            # Assert types again closer to usage for type checker clarity
+            assert isinstance(REDIS_STREAM, str)
+            assert isinstance(REDIS_CONSUMER_GROUP, str)
+            r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+            print(f"Acknowledged HealthCheck message {message_id.decode('utf-8')}")
+            return  # Exit early for health checks
+        # --- End Health Check Logic ---
+
         # Parse the content field if it's a string
         if isinstance(content_raw, str):
             try:
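The new HealthCheck branch replies on the caller's return stream and acknowledges the message before returning, so health probes never reach the user handler. A condensed sketch of that flow, assuming a connected redis-py client; the helper name and parameters are illustrative, not nebu's API:

import json
from datetime import datetime

import redis

def respond_to_health_check(r: redis.Redis, message_id: bytes, return_stream: str,
                            stream: str, group: str) -> None:
    health_response = {
        "kind": "StreamResponseMessage",
        "id": message_id.decode("utf-8"),
        "content": {"status": "healthy"},
        "status": "success",
        "created_at": datetime.now().isoformat(),
    }
    if return_stream:
        # Reply to whoever requested the health check
        r.xadd(return_stream, {"data": json.dumps(health_response)})
    # Acknowledge so the consumer group never redelivers the probe
    r.xack(stream, group, message_id)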
@@ -310,7 +351,7 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                 content_model = local_namespace[content_type_name](**content)
                 print(f"Content model: {content_model}")
                 input_obj = local_namespace["V1StreamMessage"](
-                    kind=
+                    kind=kind,
                     id=msg_id,
                     content=content_model,
                     created_at=created_at,
@@ -324,7 +365,7 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                 print(f"Error creating content type model: {e}")
                 # Fallback to using raw content
                 input_obj = local_namespace["V1StreamMessage"](
-                    kind=
+                    kind=kind,
                     id=msg_id,
                     content=content,
                     created_at=created_at,
@@ -338,7 +379,7 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
             # Just use the raw content
             print(f"Using raw content")
             input_obj = local_namespace["V1StreamMessage"](
-                kind=
+                kind=kind,
                 id=msg_id,
                 content=content,
                 created_at=created_at,
@@ -383,12 +424,17 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
 
         # Send the result to the return stream
         if return_stream:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(return_stream, str)
             r.xadd(return_stream, {"data": json.dumps(response)})
             print(
                 f"Processed message {message_id.decode('utf-8')}, result sent to {return_stream}"
             )
 
         # Acknowledge the message
+        # Assert types again closer to usage for type checker clarity
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
         r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
 
     except Exception as e:
@@ -410,11 +456,18 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
 
         # Send the error to the return stream
         if return_stream:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(return_stream, str)
             r.xadd(return_stream, {"data": json.dumps(error_response)})
         else:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(REDIS_STREAM, str)
             r.xadd(f"{REDIS_STREAM}.errors", {"data": json.dumps(error_response)})
 
         # Still acknowledge the message so we don't reprocess it
+        # Assert types again closer to usage for type checker clarity
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
         r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
 
 
@@ -424,9 +477,14 @@ consumer_name = f"consumer-{os.getpid()}"
 
 while True:
     try:
+        # Assert types just before use in the loop
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
+
         # Read from stream with blocking
         streams = {REDIS_STREAM: ">"}  # '>' means read only new messages
-
+        # The type checker still struggles here, but the runtime types are asserted.
+        messages = r.xreadgroup(  # type: ignore[arg-type]
             REDIS_CONSUMER_GROUP, consumer_name, streams, count=1, block=5000
         )
 
@@ -434,6 +492,12 @@ while True:
             # No messages received, continue waiting
             continue
 
+        # Assert that messages is a list (expected synchronous return type)
+        assert isinstance(
+            messages, list
+        ), f"Expected list from xreadgroup, got {type(messages)}"
+        assert len(messages) > 0  # Ensure the list is not empty before indexing
+
         stream_name, stream_messages = messages[0]
 
         for message_id, message_data in stream_messages:
@@ -441,7 +505,7 @@ while True:
             print(f"Message data: {message_data}")
             process_message(message_id, message_data)
 
-    except
+    except ConnectionError as e:
         print(f"Redis connection error: {e}")
         time.sleep(5)  # Wait before retrying
 
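Taken together, the loop changes keep the familiar blocking xreadgroup pattern and narrow the retry handling to redis.ConnectionError. A minimal sketch of the same read/process/ack cycle, with placeholder stream and group names and the message handling stubbed out (note that, as in the diff, importing ConnectionError from redis shadows the builtin of the same name):

import os
import time

import redis
from redis import ConnectionError

r = redis.Redis(decode_responses=True)
stream, group = "example:stream", "example-group"  # placeholder names
consumer_name = f"consumer-{os.getpid()}"

while True:
    try:
        # Block up to 5 seconds for one new message delivered to this consumer
        messages = r.xreadgroup(group, consumer_name, {stream: ">"}, count=1, block=5000)
        if not messages:
            continue  # timed out with nothing to do
        _, stream_messages = messages[0]
        for message_id, message_data in stream_messages:
            print(f"Message data: {message_data}")
            # ... application-specific processing would happen here ...
            r.xack(stream, group, message_id)
    except ConnectionError as e:
        print(f"Redis connection error: {e}")
        time.sleep(5)  # wait before retrying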

{nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/RECORD
CHANGED
@@ -6,15 +6,15 @@ nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,
 nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
 nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
-nebu/processors/consumer.py,sha256=
+nebu/processors/consumer.py,sha256=47ssdvJU7CGur0JoehNrmNGKwAAzVDVp7UTnphApQlI,20080
 nebu/processors/decorate.py,sha256=AeG1c1n8JtcexxAEf2sF2L2eKwVDaNQ5gvPs6EpazKo,34789
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=GvnI8UJrQSjHo2snP07cPfisCH90cEGTY-PZV5_AtXI,3654
 nebu/processors/processor.py,sha256=oy2YdI-cy6qQWxrZhpZahJV46oWZlu_Im-jm811R_oo,9667
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
+nebu-0.1.21.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.21.dist-info/METADATA,sha256=SmlvWXJgnKLKph_8UCEtgVtLz80EuKrcxRNm3hdEl_8,1678
+nebu-0.1.21.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nebu-0.1.21.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.21.dist-info/RECORD,,

{nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/WHEEL
File without changes

{nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/licenses/LICENSE
File without changes

{nebu-0.1.19.dist-info → nebu-0.1.21.dist-info}/top_level.txt
File without changes