nebu-0.1.82-py3-none-any.whl → nebu-0.1.84-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/processors/consumer.py

@@ -783,27 +783,53 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
     # Execute the function
     print("Executing function...")
     result = target_function(input_obj)
-    print(f"Result: {result}")  # Reduce verbosity
+    # print(f"Raw Result: {result}") # Debugging

-    # Convert result to dict if it's a Pydantic model
-    if hasattr(result, "model_dump"):  # Use model_dump for Pydantic v2+
-        result_content = result.model_dump(mode="json")  # Serialize properly
-    elif hasattr(result, "dict"):  # Fallback for older Pydantic
-        result_content = result.dict()
-    else:
-        result_content = result  # Assume JSON-serializable
+    result_content = None  # Default to None
+    if result is not None:  # Only process if there's a result
+        try:
+            if hasattr(result, "model_dump"):
+                print("[Consumer] Result has model_dump, using it.")
+                # Use 'json' mode to ensure serializability where possible
+                result_content = result.model_dump(mode="json")
+                # print(f"[Consumer] Result after model_dump: {result_content}") # Debugging
+            else:
+                # Try standard json.dumps as a fallback to check serializability
+                print(
+                    "[Consumer] Result has no model_dump, attempting json.dumps check."
+                )
+                try:
+                    # Test if it's serializable
+                    json.dumps(result)
+                    # If the above line doesn't raise TypeError, assign the original result
+                    result_content = result
+                    # print(f"[Consumer] Result assigned directly after json.dumps check passed: {result_content}") # Debugging
+                except TypeError as e:
+                    print(
+                        f"[Consumer] Warning: Result is not JSON serializable: {e}. Discarding result."
+                    )
+                    result_content = None  # Explicitly set to None on failure
+
+        except (
+            Exception
+        ) as e:  # Catch other potential model_dump errors or unexpected issues
+            print(
+                f"[Consumer] Warning: Unexpected error during result processing/serialization: {e}. Discarding result."
+            )
+            traceback.print_exc()
+            result_content = None

-    # Prepare the response
+    # Prepare the response (ensure 'content' key exists even if None)
     response = {
         "kind": "StreamResponseMessage",
         "id": message_id,
-        "content": result_content,
+        "content": result_content,  # Use the potentially None result_content
         "status": "success",
-        "created_at": datetime.now().isoformat(),
+        "created_at": datetime.now(timezone.utc).isoformat(),  # Use UTC
         "user_id": user_id,  # Pass user_id back
     }

-    # print(f"Response: {response}") # Reduce verbosity
+    # print(f"Final Response Content: {response['content']}") # Debugging

     # Send the result to the return stream
     if return_stream:
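
Note: the hunk above replaces the unconditional Pydantic conversion with a guarded serialization step and switches the response timestamp to UTC. A minimal standalone sketch of the same pattern, for illustration only; the `Greeting` model and `to_response_content` helper are hypothetical and not part of nebu:

```python
# Sketch of the serialization guard introduced above, outside the consumer.
# Assumes Pydantic v2 (model_dump) and the standard library json module.
import json
from datetime import datetime, timezone
from typing import Any

from pydantic import BaseModel


class Greeting(BaseModel):  # hypothetical result model, not part of nebu
    message: str


def to_response_content(result: Any) -> Any:
    """Return a JSON-serializable payload, or None if the result cannot be serialized."""
    if result is None:
        return None
    if hasattr(result, "model_dump"):
        # Pydantic v2: 'json' mode coerces datetimes, enums, etc. to JSON-safe types
        return result.model_dump(mode="json")
    try:
        json.dumps(result)  # probe serializability without keeping the string
        return result
    except TypeError:
        return None  # mirror the consumer: discard unserializable results


if __name__ == "__main__":
    response = {
        "kind": "StreamResponseMessage",
        "content": to_response_content(Greeting(message="hi")),
        "created_at": datetime.now(timezone.utc).isoformat(),  # UTC, as in the new code
    }
    print(response)
```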
@@ -890,7 +916,13 @@ CLAIM_COUNT = 10 # Max messages to claim at once

 try:
     while True:
+        print(
+            f"[{datetime.now(timezone.utc).isoformat()}] --- Top of main loop ---"
+        )  # Added log
         # --- Check for Code Updates ---
+        print(
+            f"[{datetime.now(timezone.utc).isoformat()}] Checking for code updates..."
+        )  # Added log
         if entrypoint_abs_path:  # Should always be set after init
             try:
                 current_mtime = os.path.getmtime(entrypoint_abs_path)
@@ -942,8 +974,14 @@ try:
             print(
                 "[Consumer] Warning: Entrypoint absolute path not set, cannot check for code updates."
             )
+        print(
+            f"[{datetime.now(timezone.utc).isoformat()}] Finished checking for code updates."
+        )  # Added log

         # --- Claim Old Pending Messages ---
+        print(
+            f"[{datetime.now(timezone.utc).isoformat()}] Checking for pending messages to claim..."
+        )  # Added log
         try:
             if target_function is not None:  # Only claim if we can process
                 assert isinstance(REDIS_STREAM, str)
@@ -1001,7 +1039,7 @@ try:

                 if claimed_messages:
                     print(
-                        f"[Consumer] Claimed {len(claimed_messages[0][1])} pending message(s). Processing..."
+                        f"[{datetime.now(timezone.utc).isoformat()}] Claimed {claimed_messages} pending message(s). Processing..."
                     )
                     # Process claimed messages immediately
                     # Cast messages to expected type to satisfy type checker
@@ -1019,6 +1057,10 @@ try:
                     # After processing claimed messages, loop back to check for more potentially
                     # This avoids immediately blocking on XREADGROUP if there were claimed messages
                     continue
+                else:  # Added log
+                    print(
+                        f"[{datetime.now(timezone.utc).isoformat()}] No pending messages claimed."
+                    )  # Added log

         except ResponseError as e_claim:
             # Handle specific errors like NOGROUP gracefully if needed
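
Note: the XAUTOCLAIM call that these logs and handlers wrap is not shown in this diff. For orientation, a generic redis-py sketch of claiming stale pending messages; the stream, group, and consumer names and the 60-second idle cutoff are placeholder assumptions, and only `count=10` mirrors the consumer's CLAIM_COUNT:

```python
# Generic redis-py sketch of claiming stale pending messages with XAUTOCLAIM.
import redis

r = redis.Redis(decode_responses=True)

result = r.xautoclaim(
    name="my-stream",           # placeholder for REDIS_STREAM
    groupname="my-group",       # placeholder for REDIS_CONSUMER_GROUP
    consumername="consumer-1",  # placeholder for consumer_name
    min_idle_time=60000,        # assumed cutoff: only claim messages idle >= 60s
    start_id="0-0",
    count=10,                   # matches CLAIM_COUNT = 10 in the consumer
)
next_start_id, claimed = result[0], result[1]  # Redis 7+ also returns deleted IDs
for message_id, fields in claimed:
    print(f"claimed {message_id}: {fields}")
```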
@@ -1030,17 +1072,26 @@ try:
             else:
                 print(f"[Consumer] Error during XAUTOCLAIM: {e_claim}")
                 # Decide if this is fatal or recoverable
+            print(
+                f"[{datetime.now(timezone.utc).isoformat()}] Error during XAUTOCLAIM: {e_claim}"
+            )  # Added log
             time.sleep(5)  # Wait before retrying claim
         except ConnectionError as e_claim_conn:
             print(
                 f"Redis connection error during XAUTOCLAIM: {e_claim_conn}. Will attempt reconnect in main loop."
             )
             # Let the main ConnectionError handler below deal with reconnection
+            print(
+                f"[{datetime.now(timezone.utc).isoformat()}] Redis connection error during XAUTOCLAIM: {e_claim_conn}. Will attempt reconnect."
+            )  # Added log
             time.sleep(5)  # Avoid tight loop on connection errors during claim
         except Exception as e_claim_other:
             print(
                 f"[Consumer] Unexpected error during XAUTOCLAIM/processing claimed messages: {e_claim_other}"
             )
+            print(
+                f"[{datetime.now(timezone.utc).isoformat()}] Unexpected error during XAUTOCLAIM/processing claimed: {e_claim_other}"
+            )  # Added log
             traceback.print_exc()
             time.sleep(5)  # Wait before retrying

@@ -1059,27 +1110,42 @@ try:
             streams_arg: Dict[str, str] = {REDIS_STREAM: ">"}

             # With decode_responses=True, redis-py expects str types here
+            print(
+                f"[{datetime.now(timezone.utc).isoformat()}] Calling xreadgroup (block=5000ms)..."
+            )  # Added log
             messages = r.xreadgroup(
                 REDIS_CONSUMER_GROUP,
                 consumer_name,
-                streams_arg,  # type: ignore[arg-type]
+                streams_arg,  # type: ignore[arg-type] # Suppress linter warning
                 count=1,
                 block=5000,  # Use milliseconds for block
             )

             if not messages:
+                print(
+                    f"[{datetime.now(timezone.utc).isoformat()}] xreadgroup timed out (no new messages)."
+                )  # Added log
                 # print("[Consumer] No new messages.") # Reduce verbosity
                 continue
+            # Removed the else block here

+            # If we reached here, messages is not empty.
             # Assert messages is not None to help type checker (already implied by `if not messages`)
             assert messages is not None

-            # Cast messages to expected type to satisfy type checker
+            # Cast messages to expected type to satisfy type checker (do it once)
             typed_messages = cast(
                 List[Tuple[str, List[Tuple[str, Dict[str, str]]]]], messages
             )
             stream_name_str, stream_messages = typed_messages[0]
+            num_msgs = len(stream_messages)
+
+            # Log reception and count before processing
+            print(
+                f"[{datetime.now(timezone.utc).isoformat()}] xreadgroup returned {num_msgs} message(s). Processing..."
+            )  # Moved and combined log

+            # Process the received messages
             # for msg_id_bytes, msg_data_bytes_dict in stream_messages: # Original structure
             for (
                 message_id_str,
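
Note: for orientation, a reduced sketch of the XREADGROUP read loop shown above. The stream, group, and consumer names are placeholders, and consumer-group creation (XGROUP CREATE ... MKSTREAM) is assumed to happen elsewhere:

```python
# Sketch of the xreadgroup read loop, reduced to its essentials.
import redis

r = redis.Redis(decode_responses=True)

while True:
    messages = r.xreadgroup(
        "my-group",          # placeholder for REDIS_CONSUMER_GROUP
        "consumer-1",        # placeholder for consumer_name
        {"my-stream": ">"},  # ">" = only messages never delivered to this group
        count=1,
        block=5000,          # milliseconds, matching the consumer
    )
    if not messages:
        continue  # block timed out with no new messages

    stream_name, stream_messages = messages[0]
    for message_id, fields in stream_messages:
        print(f"processing {message_id} from {stream_name}: {fields}")
        r.xack("my-stream", "my-group", message_id)  # acknowledge after handling
```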
nebu/processors/decorate.py

@@ -726,8 +726,6 @@ def processor(
             f"[DEBUG Decorator] Parameter '{param_name}' type hint: {param_type_str_repr}"
         )

-    if "return" not in type_hints:
-        raise TypeError(f"{processor_name} must have a return type hint")
     return_type = type_hints.get("return")
     return_type_str_repr = str(return_type)
     print(f"[DEBUG Decorator] Return type hint: {return_type_str_repr}")
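
Note: this hunk drops the hard requirement that a processor function declare a return type hint. With the check removed, `type_hints.get("return")` simply yields None for unannotated functions, as the small illustration below shows (both functions are hypothetical examples, not nebu code):

```python
# Why the removed check is no longer needed: a missing return annotation
# just produces None from get_type_hints(...).get("return").
from typing import get_type_hints


def with_hint(x: int) -> str:
    return str(x)


def without_hint(x: int):
    return str(x)


print(get_type_hints(with_hint).get("return"))     # <class 'str'>
print(get_type_hints(without_hint).get("return"))  # None (previously raised TypeError in the decorator)
```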
nebu-0.1.82.dist-info/METADATA → nebu-0.1.84.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.82
+Version: 0.1.84
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
nebu-0.1.82.dist-info/RECORD → nebu-0.1.84.dist-info/RECORD

@@ -14,17 +14,17 @@ nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,681
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
 nebu/namespaces/models.py,sha256=EqUOpzhVBhvJw2P92ONDUbIgC31M9jMmcaG5vyOrsWg,497
 nebu/namespaces/namespace.py,sha256=Q_EDH7BgQrTkaDh_l4tbo22qpq-uARfIk8ZPBLjITGY,4967
-nebu/processors/consumer.py,sha256=WMWZs0qkEy9b8G7AM1oE1JBOOgsR9Ia-Y9kaXJC3aw4,48546
+nebu/processors/consumer.py,sha256=DznWT6_51JPi3lA2KKalnZMlYJc11bpzdfeNmGe5bIQ,52039
 nebu/processors/consumer_process_worker.py,sha256=tF5KU3Rnmzfc3Y0cM8J5nwGg1cJMe-ry0FmMSgGvXrY,31765
-nebu/processors/decorate.py,sha256=U-NjFszyfKD6ACEyPJogFCbOPsfRYJUgGobLzfaHwD8,54766
+nebu/processors/decorate.py,sha256=jMh7OMamPdxGn7cMxQsOl5CEEmhZ1TXkMz8nCzBpVaU,54649
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=FnBJFxtaJkp-uIOs90qkJUBvOR80l2cdGnfmOIWIvVA,4058
 nebu/processors/processor.py,sha256=OgEK8Fz0ehSe_VFiNsxweVKZIckhgVvQQ11NNffYZqA,15848
 nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.82.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.82.dist-info/METADATA,sha256=rGGm0UyV1uisVjnbQQZ54zeVl_5JgDsLQC_zAhWsI3Y,1731
-nebu-0.1.82.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
-nebu-0.1.82.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.82.dist-info/RECORD,,
+nebu-0.1.84.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.84.dist-info/METADATA,sha256=5OLXVQZv3g7yNQGh8_gVsJ6hsjO_LzDpN6S9hqUjNdo,1731
+nebu-0.1.84.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
+nebu-0.1.84.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.84.dist-info/RECORD,,