nebu 0.1.18__py3-none-any.whl → 0.1.20__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
nebu/processors/consumer.py
@@ -6,10 +6,11 @@ import sys
 import time
 import traceback
 from datetime import datetime
-from typing import Dict, TypeVar
+from typing import Any, Dict, TypeVar, cast

 import redis
 import socks
+from redis import ConnectionError, ResponseError

 # Define TypeVar for generic models
 T = TypeVar("T")
@@ -247,7 +248,7 @@ except Exception as e:
 try:
     r.xgroup_create(REDIS_STREAM, REDIS_CONSUMER_GROUP, id="0", mkstream=True)
     print(f"Created consumer group {REDIS_CONSUMER_GROUP} for stream {REDIS_STREAM}")
-except redis.exceptions.ResponseError as e:
+except ResponseError as e:
     if "BUSYGROUP" in str(e):
         print(f"Consumer group {REDIS_CONSUMER_GROUP} already exists")
     else:
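
The change above only narrows the import: `ResponseError` re-exported at the top of the `redis` package is the same class as `redis.exceptions.ResponseError`, so behaviour is unchanged. For reference, the idempotent group-creation pattern looks like this in isolation (a minimal sketch; the stream and group names are placeholders, not the package's configuration):

    import redis
    from redis import ResponseError  # same class as redis.exceptions.ResponseError

    r = redis.Redis(decode_responses=True)

    def ensure_group(stream: str, group: str) -> None:
        """Create the consumer group once; treat an existing group as success."""
        try:
            r.xgroup_create(stream, group, id="0", mkstream=True)
        except ResponseError as e:
            # Redis signals an already-existing group with a BUSYGROUP error.
            if "BUSYGROUP" not in str(e):
                raise

    ensure_group("demo-stream", "demo-group")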
@@ -262,17 +263,19 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
     user_id = None

     try:
-        # Get the message content from field 'data'
-        if b"data" not in message_data:
-            print(f"Message {message_id} has no 'data' field")
-            return
+        # Assign message_data directly to raw_payload.
+        # Cast to Dict[str, Any] to inform type checker of the expected type due to decode_responses=True.
+        raw_payload = cast(Dict[str, Any], message_data)

-        # Parse the message data
-        raw_payload = json.loads(message_data[b"data"].decode("utf-8"))
+        # Validate that raw_payload is a dictionary as expected - Removed, cast handles this for type checker
+        # if not isinstance(raw_payload, dict):
+        #     raise TypeError(f"Expected message_data to be a dictionary, but got {type(raw_payload)}")

-        # Extract fields from the Rust structure
+        print(f"Raw payload: {raw_payload}")
+
+        # Extract fields from the parsed payload
         # These fields are extracted for completeness and potential future use
-        _ = raw_payload.get("kind", "")  # kind
+        kind = raw_payload.get("kind", "")  # kind
         msg_id = raw_payload.get("id", "")  # msg_id
         content_raw = raw_payload.get("content", {})
         created_at = raw_payload.get("created_at", 0)  # created_at
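
The rewritten parsing relies on the Redis client being created with `decode_responses=True`, in which case redis-py returns stream fields as `str`-keyed dictionaries rather than `bytes`, and `cast()` only informs the type checker (it has no runtime effect). A minimal sketch of that behaviour, using a throwaway stream name:

    from typing import Any, Dict, cast

    import redis

    # decode_responses=True makes redis-py return str instead of bytes everywhere.
    r = redis.Redis(decode_responses=True)
    r.xadd("demo-stream", {"kind": "StreamMessage", "id": "abc"})

    # xrange returns [(entry_id, fields)]; fields is already a str -> str dict here.
    entry_id, fields = r.xrange("demo-stream", count=1)[0]
    payload = cast(Dict[str, Any], fields)  # no runtime effect, only a type hint
    print(entry_id, payload.get("kind", ""))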
@@ -282,6 +285,31 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
         handle = raw_payload.get("handle")  # handle
         adapter = raw_payload.get("adapter")  # adapter

+        # --- Health Check Logic based on kind ---
+        if kind == "HealthCheck":
+            print(f"Received HealthCheck message {message_id.decode('utf-8')}")
+            health_response = {
+                "kind": "StreamResponseMessage",  # Respond with a standard message kind
+                "id": message_id.decode("utf-8"),
+                "content": {"status": "healthy", "checked_message_id": msg_id},
+                "status": "success",
+                "created_at": datetime.now().isoformat(),
+                "user_id": user_id,  # Include user_id if available
+            }
+            if return_stream:
+                # Assert type again closer to usage for type checker clarity
+                assert isinstance(return_stream, str)
+                r.xadd(return_stream, {"data": json.dumps(health_response)})
+                print(f"Sent health check response to {return_stream}")
+
+            # Assert types again closer to usage for type checker clarity
+            assert isinstance(REDIS_STREAM, str)
+            assert isinstance(REDIS_CONSUMER_GROUP, str)
+            r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+            print(f"Acknowledged HealthCheck message {message_id.decode('utf-8')}")
+            return  # Exit early for health checks
+        # --- End Health Check Logic ---
+
         # Parse the content field if it's a string
         if isinstance(content_raw, str):
             try:
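
The new branch lets a caller probe a processor by sending a message whose `kind` is `HealthCheck`; the consumer replies on the return stream with a `StreamResponseMessage` envelope wrapped in a `data` field and acknowledges the message without invoking the user function. A hypothetical client-side round trip is sketched below; the stream names and the `return_stream` field name are assumptions, since that extraction is outside this hunk:

    import json

    import redis

    r = redis.Redis(decode_responses=True)

    # Ask the consumer to confirm it is alive (field names mirror the diff,
    # except "return_stream", which is assumed here).
    r.xadd(
        "processor-stream",  # assumed name of the processor's work stream
        {
            "kind": "HealthCheck",
            "id": "health-1",
            "content": json.dumps({}),
            "created_at": 0,
            "return_stream": "health-replies",  # assumed reply-stream field name
        },
    )

    # The consumer writes its reply as JSON under a "data" field on the return stream.
    replies = r.xread({"health-replies": "0"}, count=1, block=5000)
    if replies:
        _, entries = replies[0]
        _, fields = entries[0]
        reply = json.loads(fields["data"])
        print(reply["kind"], reply["content"]["status"])  # StreamResponseMessage healthy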
@@ -291,6 +319,8 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
         else:
             content = content_raw

+        print(f"Content: {content}")
+
         # For StreamMessage, construct the proper input object
         if is_stream_message and "V1StreamMessage" in local_namespace:
             # If we have a content type, try to construct it
@@ -298,8 +328,9 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                 # Try to create the content type model first
                 try:
                     content_model = local_namespace[content_type_name](**content)
+                    print(f"Content model: {content_model}")
                     input_obj = local_namespace["V1StreamMessage"](
-                        kind=_,
+                        kind=kind,
                         id=msg_id,
                         content=content_model,
                         created_at=created_at,
@@ -313,7 +344,7 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                     print(f"Error creating content type model: {e}")
                     # Fallback to using raw content
                     input_obj = local_namespace["V1StreamMessage"](
-                        kind=_,
+                        kind=kind,
                         id=msg_id,
                         content=content,
                         created_at=created_at,
@@ -325,8 +356,9 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                     )
             else:
                 # Just use the raw content
+                print(f"Using raw content")
                 input_obj = local_namespace["V1StreamMessage"](
-                    kind=_,
+                    kind=kind,
                     id=msg_id,
                     content=content,
                     created_at=created_at,
@@ -348,9 +380,11 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
                 print(f"Error creating input model: {e}, using raw content")
                 input_obj = content

+        print(f"Input object: {input_obj}")
+
         # Execute the function
         result = target_function(input_obj)
-
+        print(f"Result: {result}")
         # If the result is a Pydantic model, convert to dict
         if hasattr(result, "model_dump"):
             result = result.model_dump()
@@ -365,14 +399,21 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None
             "user_id": user_id,
         }

+        print(f"Response: {response}")
+
         # Send the result to the return stream
         if return_stream:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(return_stream, str)
             r.xadd(return_stream, {"data": json.dumps(response)})
             print(
                 f"Processed message {message_id.decode('utf-8')}, result sent to {return_stream}"
             )

         # Acknowledge the message
+        # Assert types again closer to usage for type checker clarity
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
         r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)

     except Exception as e:
@@ -394,11 +435,18 @@ def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None

         # Send the error to the return stream
         if return_stream:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(return_stream, str)
             r.xadd(return_stream, {"data": json.dumps(error_response)})
         else:
+            # Assert type again closer to usage for type checker clarity
+            assert isinstance(REDIS_STREAM, str)
             r.xadd(f"{REDIS_STREAM}.errors", {"data": json.dumps(error_response)})

         # Still acknowledge the message so we don't reprocess it
+        # Assert types again closer to usage for type checker clarity
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
         r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)

@@ -408,9 +456,13 @@ consumer_name = f"consumer-{os.getpid()}"

 while True:
     try:
+        # Assert types just before use in the loop
+        assert isinstance(REDIS_STREAM, str)
+        assert isinstance(REDIS_CONSUMER_GROUP, str)
+
         # Read from stream with blocking
         streams = {REDIS_STREAM: ">"}  # '>' means read only new messages
-        messages = r.xreadgroup(
+        messages = r.xreadgroup(  # type: ignore[arg-type]
             REDIS_CONSUMER_GROUP, consumer_name, streams, count=1, block=5000
         )

@@ -418,12 +470,20 @@ while True:
             # No messages received, continue waiting
             continue

+        # Assert that messages is a list (expected synchronous return type)
+        assert isinstance(
+            messages, list
+        ), f"Expected list from xreadgroup, got {type(messages)}"
+        assert len(messages) > 0  # Ensure the list is not empty before indexing
+
         stream_name, stream_messages = messages[0]

         for message_id, message_data in stream_messages:
+            print(f"Processing message {message_id}")
+            print(f"Message data: {message_data}")
             process_message(message_id, message_data)

-    except redis.exceptions.ConnectionError as e:
+    except ConnectionError as e:
         print(f"Redis connection error: {e}")
         time.sleep(5)  # Wait before retrying

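With the synchronous redis-py client, `xreadgroup` returns a list of `(stream_name, [(message_id, fields), ...])` pairs, which is exactly what the added asserts spell out for the type checker. A condensed sketch of the same read/process/acknowledge loop, with placeholder names instead of the module's globals:

    import os
    import time

    import redis
    from redis import ConnectionError

    r = redis.Redis(decode_responses=True)
    stream, group = "demo-stream", "demo-group"
    consumer = f"consumer-{os.getpid()}"  # assumes the group was created beforehand

    while True:
        try:
            # Synchronous client: returns [(stream_name, [(message_id, fields), ...])]
            # or an empty result when the 5s block times out.
            messages = r.xreadgroup(group, consumer, {stream: ">"}, count=1, block=5000)
            if not messages:
                continue
            for message_id, fields in messages[0][1]:
                print(f"Processing {message_id}: {fields}")
                r.xack(stream, group, message_id)  # acknowledge after handling
        except ConnectionError as e:
            print(f"Redis connection error: {e}")
            time.sleep(5)  # wait before retrying
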
nebu-0.1.18.dist-info/METADATA → nebu-0.1.20.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.18
+Version: 0.1.20
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
nebu-0.1.18.dist-info/RECORD → nebu-0.1.20.dist-info/RECORD
@@ -6,15 +6,15 @@ nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,
 nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
 nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
-nebu/processors/consumer.py,sha256=0HJxRLoeRdN4xY6bjIxqr5bD5JpFSyKb5s-eS5oTy9s,16063
+nebu/processors/consumer.py,sha256=kX4UT8Z5c_LWU3MOcM5qXSzHEA0CYjUZ4dRJfhUKP-M,19202
 nebu/processors/decorate.py,sha256=AeG1c1n8JtcexxAEf2sF2L2eKwVDaNQ5gvPs6EpazKo,34789
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=GvnI8UJrQSjHo2snP07cPfisCH90cEGTY-PZV5_AtXI,3654
 nebu/processors/processor.py,sha256=oy2YdI-cy6qQWxrZhpZahJV46oWZlu_Im-jm811R_oo,9667
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.18.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.18.dist-info/METADATA,sha256=wF7klvNGu6yoVp726tHAmn3Ouo4Kt2XhuceGw7zXGzM,1678
-nebu-0.1.18.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-nebu-0.1.18.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.18.dist-info/RECORD,,
+nebu-0.1.20.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.20.dist-info/METADATA,sha256=mmrQRfaJ6jXFMa02a7rp4ghX1wKW8m3TLHeBCsmk64g,1678
+nebu-0.1.20.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nebu-0.1.20.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.20.dist-info/RECORD,,