nebu 0.1.73__py3-none-any.whl → 0.1.76__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,6 +3,7 @@ import importlib
3
3
  import json
4
4
  import os
5
5
  import socket
6
+ import subprocess
6
7
  import sys
7
8
  import time
8
9
  import traceback
@@ -28,6 +29,19 @@ local_namespace: Dict[str, Any] = {} # Namespace for included objects
28
29
  last_load_mtime: float = 0.0
29
30
  entrypoint_abs_path: Optional[str] = None
30
31
 
32
# Redis stream configuration, read once at import time.
REDIS_CONSUMER_GROUP = os.environ.get("REDIS_CONSUMER_GROUP")
REDIS_STREAM = os.environ.get("REDIS_STREAM")

# Execution mode selects how messages are handled: "inline" runs the user
# function in this process, "subprocess" delegates to a worker process.
NEBU_EXECUTION_MODE = os.environ.get("NEBU_EXECUTION_MODE", "inline").lower()

if NEBU_EXECUTION_MODE in ("inline", "subprocess"):
    execution_mode = NEBU_EXECUTION_MODE
else:
    # Unknown value: warn and fall back to the safe default.
    print(
        f"Invalid NEBU_EXECUTION_MODE: {NEBU_EXECUTION_MODE}. Must be 'inline' or 'subprocess'. Defaulting to 'inline'."
    )
    execution_mode = "inline"

print(f"Execution mode: {execution_mode}")
44
+
31
45
 
32
46
  # --- Function to Load/Reload User Code ---
33
47
  def load_or_reload_user_code(
@@ -287,8 +301,6 @@ except Exception as e:
287
301
 
288
302
  # Get Redis connection parameters from environment
289
303
  REDIS_URL = os.environ.get("REDIS_URL", "")
290
- REDIS_CONSUMER_GROUP = os.environ.get("REDIS_CONSUMER_GROUP")
291
- REDIS_STREAM = os.environ.get("REDIS_STREAM")
292
304
 
293
305
  if not all([REDIS_URL, REDIS_CONSUMER_GROUP, REDIS_STREAM]):
294
306
  print("Missing required Redis environment variables")
@@ -334,15 +346,100 @@ except ResponseError as e:
334
346
  def process_message(message_id: str, message_data: Dict[str, str]) -> None:
335
347
  # Access the globally managed user code elements
336
348
  global target_function, imported_module, local_namespace
349
+ global execution_mode, r, REDIS_STREAM, REDIS_CONSUMER_GROUP
350
+
351
+ # --- Subprocess Execution Path ---
352
+ if execution_mode == "subprocess":
353
+ print(f"Processing message {message_id} in subprocess...")
354
+ try:
355
+ worker_cmd = [
356
+ sys.executable,
357
+ "-m",
358
+ "nebu.processors.consumer_process_worker",
359
+ ]
360
+ process_input = json.dumps(
361
+ {"message_id": message_id, "message_data": message_data}
362
+ )
363
+
364
+ # Run the worker script as a module
365
+ # Inherit environment variables automatically
366
+ # Pass message data via stdin
367
+ result = subprocess.run(
368
+ worker_cmd,
369
+ input=process_input,
370
+ text=True,
371
+ capture_output=True,
372
+ check=True, # Raise CalledProcessError on non-zero exit
373
+ env=os.environ.copy(), # Ensure environment is passed
374
+ )
375
+ print(f"Subprocess for {message_id} completed successfully.")
376
+ if result.stdout:
377
+ print(f"Subprocess stdout:\n{result.stdout}")
378
+ # Assume the subprocess handled response sending and acknowledgement
379
+
380
+ except subprocess.CalledProcessError as e:
381
+ print(
382
+ f"Subprocess for message {message_id} failed with exit code {e.returncode}."
383
+ )
384
+ if e.stdout:
385
+ print(f"Subprocess stdout:\n{e.stdout}")
386
+ if e.stderr:
387
+ print(f"Subprocess stderr:\n{e.stderr}")
388
+
389
+ # Send a generic error message back, as the subprocess likely failed
390
+ # before it could send its specific error.
391
+ _send_error_response(
392
+ message_id,
393
+ f"Subprocess execution failed with exit code {e.returncode}",
394
+ e.stderr or "No stderr captured",
395
+ message_data.get(
396
+ "return_stream"
397
+ ), # Try to get return stream from original data
398
+ message_data.get("user_id"), # Try to get user_id from original data
399
+ )
400
+
401
+ # CRITICAL: Acknowledge the message here since the subprocess failed
402
+ try:
403
+ assert isinstance(REDIS_STREAM, str)
404
+ assert isinstance(REDIS_CONSUMER_GROUP, str)
405
+ r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
406
+ print(f"Acknowledged failed subprocess message {message_id}")
407
+ except Exception as e_ack:
408
+ print(
409
+ f"CRITICAL: Failed to acknowledge failed subprocess message {message_id}: {e_ack}"
410
+ )
411
+
412
+ except Exception as e:
413
+ print(
414
+ f"Error launching or managing subprocess for message {message_id}: {e}"
415
+ )
416
+ traceback.print_exc()
417
+ # Also send an error and acknowledge
418
+ _send_error_response(
419
+ message_id,
420
+ f"Failed to launch subprocess: {e}",
421
+ traceback.format_exc(),
422
+ message_data.get("return_stream"),
423
+ message_data.get("user_id"),
424
+ )
425
+ try:
426
+ assert isinstance(REDIS_STREAM, str)
427
+ assert isinstance(REDIS_CONSUMER_GROUP, str)
428
+ r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
429
+ print(
430
+ f"Acknowledged message {message_id} after subprocess launch failure"
431
+ )
432
+ except Exception as e_ack:
433
+ print(
434
+ f"CRITICAL: Failed to acknowledge message {message_id} after subprocess launch failure: {e_ack}"
435
+ )
436
+ return # Exit process_message after handling subprocess logic
337
437
 
338
- # Check if target_function is loaded (might be None if reload failed)
438
+ # --- Inline Execution Path (Original Logic) ---
339
439
  if target_function is None or imported_module is None:
340
440
  print(
341
441
  f"Error processing message {message_id}: User code (target_function or module) is not loaded. Skipping."
342
442
  )
343
- # Decide how to handle this - skip and ack? Send error?
344
- # Sending error for now, but not acking yet.
345
- # This requires the main loop to handle potential ack failure later if needed.
346
443
  _send_error_response(
347
444
  message_id,
348
445
  "User code is not loaded (likely due to a failed reload)",
@@ -562,7 +659,7 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
562
659
  # print(f"Input object: {input_obj}") # Reduce verbosity
563
660
 
564
661
  # Execute the function
565
- print(f"Executing function...")
662
+ print("Executing function...")
566
663
  result = target_function(input_obj)
567
664
  print(f"Result: {result}") # Reduce verbosity
568
665
 
@@ -0,0 +1,710 @@
1
+ #!/usr/bin/env python3
2
+ import importlib
3
+ import json
4
+ import os
5
+ import socket
6
+ import sys
7
+
8
+ # import time # Removed unused import
9
+ import traceback
10
+ import types
11
+ from datetime import datetime, timezone
12
+ from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, cast
13
+
14
+ import redis
15
+ import socks
16
+
17
+ # from redis import ConnectionError, ResponseError # Removed unused imports
18
+
19
# Define TypeVar for generic models
T = TypeVar("T")

# Environment variable name used as a guard in the decorator
_NEBU_INSIDE_CONSUMER_ENV_VAR = "_NEBU_INSIDE_CONSUMER_EXEC"


# --- Global variables for dynamically loaded code (in this process) ---
# Populated once by load_user_code(); this worker process never reloads.
target_function: Optional[Callable] = None
init_function: Optional[Callable] = None
imported_module: Optional[types.ModuleType] = None
local_namespace: Dict[str, Any] = {}  # Namespace for included objects
last_load_mtime: float = (
    0.0  # Note: This worker doesn't auto-reload code. It loads once.
)
entrypoint_abs_path: Optional[str] = None
35
+
36
+
37
# --- Function to Load User Code (Copied from consumer.py, no reload needed) ---
def load_user_code(
    module_path: str,
    function_name: str,
    entrypoint_abs_path: str,
    init_func_name: Optional[str] = None,
    included_object_sources: Optional[List[Tuple[str, List[str]]]] = None,
) -> Tuple[
    Optional[Callable],
    Optional[Callable],
    Optional[types.ModuleType],
    Dict[str, Any],
]:
    """Loads the user code module, executes includes, and returns functions/module.

    Args:
        module_path: Dotted module path to import (e.g. "pkg.entrypoint").
        function_name: Attribute name of the target handler on the module.
        entrypoint_abs_path: Absolute path of the entrypoint file; used only
            in the FileNotFoundError message here.
        init_func_name: Optional attribute name of a no-argument init function
            to fetch and call immediately after import.
        included_object_sources: Optional list of (object_source, [arg_sources])
            code strings exec'd into a scratch namespace before the import.
            Presumably produced by the @include decorator machinery — each
            source is executed best-effort (errors are logged, not raised).

    Returns:
        (target_function, init_function, module, namespace) on success, or
        (None, None, None, {}) on any load failure.
    """
    global _NEBU_INSIDE_CONSUMER_ENV_VAR  # Access the global guard var name

    loaded_target_func = None
    loaded_init_func = None
    loaded_module = None
    exec_namespace: Dict[str, Any] = {}  # Use a local namespace for this load attempt

    print(f"[Worker Code Loader] Attempting to load module: '{module_path}'")
    # Guard env var is set *before* import so the user module (via the
    # decorator) can detect it is being imported by the consumer.
    os.environ[_NEBU_INSIDE_CONSUMER_ENV_VAR] = "1"  # Set guard *before* import
    print(
        f"[Worker Code Loader] Set environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}=1"
    )

    try:
        # Execute included object sources FIRST (if any)
        if included_object_sources:
            print("[Worker Code Loader] Executing @include object sources...")
            # Include necessary imports for the exec context
            exec("from pydantic import BaseModel, Field", exec_namespace)
            exec(
                "from typing import Optional, List, Dict, Any, Generic, TypeVar",
                exec_namespace,
            )
            exec("T_exec = TypeVar('T_exec')", exec_namespace)
            exec("from nebu.processors.models import *", exec_namespace)
            # ... add other common imports if needed by included objects ...

            for i, (obj_source, args_sources) in enumerate(included_object_sources):
                try:
                    exec(obj_source, exec_namespace)
                    print(
                        f"[Worker Code Loader] Successfully executed included object {i} base source"
                    )
                    for j, arg_source in enumerate(args_sources):
                        try:
                            exec(arg_source, exec_namespace)
                            print(
                                f"[Worker Code Loader] Successfully executed included object {i} arg {j} source"
                            )
                        except Exception as e_arg:
                            print(
                                f"Error executing included object {i} arg {j} source: {e_arg}"
                            )
                            traceback.print_exc()
                except Exception as e_base:
                    print(f"Error executing included object {i} base source: {e_base}")
                    traceback.print_exc()
            print("[Worker Code Loader] Finished executing included object sources.")

        # Import the main module (no reload needed in worker)
        loaded_module = importlib.import_module(module_path)
        print(f"[Worker Code Loader] Successfully imported module: {module_path}")

        # Get the target function from the loaded module
        loaded_target_func = getattr(loaded_module, function_name)
        print(
            f"[Worker Code Loader] Successfully loaded function '{function_name}' from module '{module_path}'"
        )

        # Get the init function if specified
        if init_func_name:
            loaded_init_func = getattr(loaded_module, init_func_name)
            print(
                f"[Worker Code Loader] Successfully loaded init function '{init_func_name}' from module '{module_path}'"
            )
            # Execute init_func
            print(f"[Worker Code Loader] Executing init_func: {init_func_name}...")
            loaded_init_func()  # Call the function
            print(
                f"[Worker Code Loader] Successfully executed init_func: {init_func_name}"
            )

        print("[Worker Code Loader] Code load successful.")
        return (
            loaded_target_func,
            loaded_init_func,
            loaded_module,
            exec_namespace,
        )

    except FileNotFoundError:
        print(
            f"[Worker Code Loader] Error: Entrypoint file not found at '{entrypoint_abs_path}'. Cannot load."
        )
        return None, None, None, {}  # Indicate failure
    except ImportError as e:
        print(f"[Worker Code Loader] Error importing module '{module_path}': {e}")
        traceback.print_exc()
        return None, None, None, {}  # Indicate failure
    except AttributeError as e:
        print(
            f"[Worker Code Loader] Error accessing function '{function_name}' or '{init_func_name}' in module '{module_path}': {e}"
        )
        traceback.print_exc()
        return None, None, None, {}  # Indicate failure
    except Exception as e:
        print(f"[Worker Code Loader] Unexpected error during code load: {e}")
        traceback.print_exc()
        return None, None, None, {}  # Indicate failure
    finally:
        # Unset the guard environment variable
        os.environ.pop(_NEBU_INSIDE_CONSUMER_ENV_VAR, None)
        print(
            f"[Worker Code Loader] Unset environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}"
        )
+ )
156
+
157
+
158
+ # --- Helper to Send Error Response (Copied from consumer.py) ---
159
+ # Note: 'r' and 'REDIS_STREAM' will be global in this worker's context
160
+ def _send_error_response(
161
+ message_id: str,
162
+ error_msg: str,
163
+ tb: str,
164
+ return_stream: Optional[str],
165
+ user_id: Optional[str],
166
+ ):
167
+ """Sends a standardized error response to Redis."""
168
+ global r, redis_stream # Use lowercase redis_stream
169
+
170
+ # Check if Redis connection exists before trying to use it
171
+ if r is None:
172
+ print(
173
+ "[Worker] CRITICAL: Cannot send error response, Redis connection is not available."
174
+ )
175
+ return
176
+ # Assert REDIS_STREAM type here for safety, although it should be set if r is available
177
+ if not isinstance(redis_stream, str):
178
+ print(
179
+ "[Worker] CRITICAL: Cannot send error response, REDIS_STREAM is not a valid string."
180
+ )
181
+ return
182
+
183
+ error_response = {
184
+ "kind": "StreamResponseMessage",
185
+ "id": message_id,
186
+ "content": {
187
+ "error": error_msg,
188
+ "traceback": tb,
189
+ },
190
+ "status": "error",
191
+ "created_at": datetime.now(timezone.utc).isoformat(), # Use UTC
192
+ "user_id": user_id,
193
+ }
194
+
195
+ error_destination = f"{redis_stream}.errors" # Default error stream
196
+ if return_stream: # Prefer return_stream if available
197
+ error_destination = return_stream
198
+
199
+ try:
200
+ assert isinstance(error_destination, str)
201
+ r.xadd(error_destination, {"data": json.dumps(error_response)})
202
+ print(
203
+ f"[Worker] Sent error response for message {message_id} to {error_destination}"
204
+ )
205
+ except Exception as e_redis:
206
+ print(
207
+ f"[Worker] CRITICAL: Failed to send error response for {message_id} to Redis: {e_redis}"
208
+ )
209
+ traceback.print_exc()
210
+
211
+
212
+ # --- Main Worker Logic ---
213
+ if __name__ == "__main__":
214
+ print("[Worker] Starting subprocess worker...")
215
+ r: Optional[redis.Redis] = None # Initialize Redis connection variable
216
+ # Initialize potentially unbound variables
217
+ message_id: Optional[str] = None
218
+ message_data: Dict[str, Any] = {}
219
+ # Use lowercase variables for mutable values from env
220
+ redis_stream: Optional[str] = None
221
+ redis_consumer_group: Optional[str] = None
222
+
223
+ try:
224
+ # --- 1. Read Input from Stdin ---
225
+ print("[Worker] Reading message data from stdin...")
226
+ input_data_str = sys.stdin.read()
227
+ if not input_data_str:
228
+ print("[Worker] FATAL: No input data received from stdin.")
229
+ sys.exit(1)
230
+
231
+ try:
232
+ input_data = json.loads(input_data_str)
233
+ message_id = input_data["message_id"]
234
+ message_data = input_data["message_data"]
235
+ print(f"[Worker] Received message_id: {message_id}")
236
+ except (json.JSONDecodeError, KeyError) as e:
237
+ print(f"[Worker] FATAL: Failed to parse input JSON from stdin: {e}")
238
+ # Cannot easily send error response without message_id/Redis info
239
+ sys.exit(1)
240
+
241
+ # --- 2. Read Configuration from Environment ---
242
+ print("[Worker] Reading configuration from environment variables...")
243
+ try:
244
+ # Core function info
245
+ _function_name = os.environ.get("FUNCTION_NAME")
246
+ _entrypoint_rel_path = os.environ.get("NEBU_ENTRYPOINT_MODULE_PATH")
247
+
248
+ # Type info
249
+ is_stream_message = os.environ.get("IS_STREAM_MESSAGE") == "True"
250
+ param_type_str = os.environ.get("PARAM_TYPE_STR")
251
+ return_type_str = os.environ.get("RETURN_TYPE_STR")
252
+ content_type_name = os.environ.get("CONTENT_TYPE_NAME")
253
+
254
+ # Init func info
255
+ _init_func_name = os.environ.get("INIT_FUNC_NAME")
256
+
257
+ # Included object sources
258
+ _included_object_sources = []
259
+ i = 0
260
+ while True:
261
+ obj_source = os.environ.get(f"INCLUDED_OBJECT_{i}_SOURCE")
262
+ if obj_source:
263
+ args = []
264
+ j = 0
265
+ while True:
266
+ arg_source = os.environ.get(
267
+ f"INCLUDED_OBJECT_{i}_ARG_{j}_SOURCE"
268
+ )
269
+ if arg_source:
270
+ args.append(arg_source)
271
+ j += 1
272
+ else:
273
+ break
274
+ _included_object_sources.append((obj_source, args))
275
+ i += 1
276
+ else:
277
+ break
278
+
279
+ if not _function_name or not _entrypoint_rel_path:
280
+ raise ValueError(
281
+ "FUNCTION_NAME or NEBU_ENTRYPOINT_MODULE_PATH environment variables not set"
282
+ )
283
+
284
+ # Redis info
285
+ REDIS_URL = os.environ.get("REDIS_URL", "")
286
+ # Read into temporary uppercase vars first
287
+ _redis_consumer_group_env = os.environ.get("REDIS_CONSUMER_GROUP")
288
+ _redis_stream_env = os.environ.get("REDIS_STREAM")
289
+ # Assign to lowercase mutable vars
290
+ redis_consumer_group = _redis_consumer_group_env
291
+ redis_stream = _redis_stream_env
292
+
293
+ if not all([REDIS_URL, redis_consumer_group, redis_stream]):
294
+ raise ValueError("Missing required Redis environment variables")
295
+
296
+ # Calculate absolute path
297
+ entrypoint_abs_path = os.path.abspath(_entrypoint_rel_path)
298
+ if not os.path.exists(entrypoint_abs_path):
299
+ python_path = os.environ.get("PYTHONPATH", "").split(os.pathsep)
300
+ found_path = False
301
+ for p_path in python_path:
302
+ potential_path = os.path.abspath(
303
+ os.path.join(p_path, _entrypoint_rel_path)
304
+ )
305
+ if os.path.exists(potential_path):
306
+ entrypoint_abs_path = potential_path
307
+ found_path = True
308
+ print(
309
+ f"[Worker] Found entrypoint absolute path via PYTHONPATH: {entrypoint_abs_path}"
310
+ )
311
+ break
312
+ if not found_path:
313
+ raise ValueError(
314
+ f"Could not find entrypoint file via relative path '{_entrypoint_rel_path}' or in PYTHONPATH."
315
+ )
316
+
317
+ # Convert entrypoint file path to module path
318
+ _module_path = _entrypoint_rel_path.replace(os.sep, ".")
319
+ if _module_path.endswith(".py"):
320
+ _module_path = _module_path[:-3]
321
+ if _module_path.endswith(".__init__"):
322
+ _module_path = _module_path[: -len(".__init__")]
323
+ elif _module_path == "__init__":
324
+ raise ValueError(
325
+ f"Entrypoint '{_entrypoint_rel_path}' resolves to ambiguous top-level __init__."
326
+ )
327
+ if not _module_path:
328
+ raise ValueError(
329
+ f"Could not derive a valid module path from entrypoint '{_entrypoint_rel_path}'"
330
+ )
331
+
332
+ print(
333
+ f"[Worker] Config loaded. Module: '{_module_path}', Function: '{_function_name}'"
334
+ )
335
+
336
+ except ValueError as e:
337
+ print(f"[Worker] FATAL: Configuration error: {e}")
338
+ # Cannot send error response without Redis connection
339
+ sys.exit(1)
340
+ except Exception as e:
341
+ print(f"[Worker] FATAL: Unexpected error reading environment: {e}")
342
+ traceback.print_exc()
343
+ sys.exit(1)
344
+
345
+ # --- 3. Set up SOCKS Proxy ---
346
+ print("[Worker] Configuring SOCKS proxy...")
347
+ try:
348
+ socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
349
+ socket.socket = socks.socksocket
350
+ print(
351
+ "[Worker] Configured SOCKS5 proxy for socket connections via localhost:1055"
352
+ )
353
+ except Exception as e:
354
+ print(f"[Worker] FATAL: Failed to configure SOCKS proxy: {e}")
355
+ traceback.print_exc()
356
+ sys.exit(1)
357
+
358
+ # --- 4. Connect to Redis ---
359
+ print("[Worker] Connecting to Redis...")
360
+ try:
361
+ r = redis.from_url(REDIS_URL, decode_responses=True)
362
+ r.ping()
363
+ redis_info = REDIS_URL.split("@")[-1] if "@" in REDIS_URL else REDIS_URL
364
+ print(f"[Worker] Connected to Redis via SOCKS proxy at {redis_info}")
365
+ except Exception as e:
366
+ print(f"[Worker] FATAL: Failed to connect to Redis: {e}")
367
+ traceback.print_exc()
368
+ sys.exit(1) # Cannot proceed without Redis
369
+
370
+ # --- 5. Load User Code ---
371
+ print("[Worker] Loading user code...")
372
+ try:
373
+ (
374
+ target_function,
375
+ init_function,
376
+ imported_module,
377
+ local_namespace,
378
+ ) = load_user_code(
379
+ _module_path,
380
+ _function_name,
381
+ entrypoint_abs_path,
382
+ _init_func_name,
383
+ _included_object_sources,
384
+ )
385
+
386
+ if target_function is None or imported_module is None:
387
+ # load_user_code prints errors, just need to exit
388
+ raise RuntimeError("User code loading failed.")
389
+ print("[Worker] User code loaded successfully.")
390
+
391
+ except Exception as e:
392
+ print(f"[Worker] Error during user code load: {e}")
393
+ traceback.print_exc()
394
+ # Send error response via Redis before exiting
395
+ # Assert message_id is str before sending error
396
+ assert isinstance(message_id, str)
397
+ _send_error_response(
398
+ message_id,
399
+ f"User code load failed: {e}",
400
+ traceback.format_exc(),
401
+ message_data.get("return_stream"),
402
+ message_data.get("user_id"),
403
+ )
404
+ # Acknowledge the message to prevent reprocessing a load failure
405
+ try:
406
+ assert isinstance(redis_stream, str)
407
+ assert isinstance(redis_consumer_group, str)
408
+ # message_id should be str here if code load failed after reading it
409
+ assert isinstance(message_id, str)
410
+ r.xack(redis_stream, redis_consumer_group, message_id)
411
+ print(
412
+ f"[Worker] Acknowledged message {message_id} after code load failure."
413
+ )
414
+ except Exception as e_ack:
415
+ print(
416
+ f"[Worker] CRITICAL: Failed to acknowledge message {message_id} after code load failure: {e_ack}"
417
+ )
418
+ sys.exit(1) # Exit after attempting to report failure
419
+
420
+ # --- 6. Execute Processing Logic (Adapted from consumer.py inline path) ---
421
+ print(f"[Worker] Processing message {message_id}...")
422
+ return_stream = None
423
+ user_id = None
424
+ try:
425
+ payload_str = message_data.get("data")
426
+ if not payload_str:
427
+ raise ValueError("Missing or invalid 'data' field")
428
+ try:
429
+ raw_payload = json.loads(payload_str)
430
+ except json.JSONDecodeError as json_err:
431
+ raise ValueError(
432
+ f"Failed to parse JSON payload: {json_err}"
433
+ ) from json_err
434
+ if not isinstance(raw_payload, dict):
435
+ raise TypeError(
436
+ f"Expected parsed payload dictionary, got {type(raw_payload)}"
437
+ )
438
+
439
+ kind = raw_payload.get("kind", "")
440
+ msg_id = raw_payload.get("id", "") # ID from within the payload
441
+ content_raw = raw_payload.get("content", {})
442
+ created_at_str = raw_payload.get("created_at")
443
+ try:
444
+ created_at = (
445
+ datetime.fromisoformat(created_at_str)
446
+ if created_at_str and isinstance(created_at_str, str)
447
+ else datetime.now(timezone.utc)
448
+ )
449
+ except ValueError:
450
+ created_at = datetime.now(timezone.utc)
451
+
452
+ return_stream = raw_payload.get("return_stream")
453
+ user_id = raw_payload.get("user_id")
454
+ orgs = raw_payload.get("organizations")
455
+ handle = raw_payload.get("handle")
456
+ adapter = raw_payload.get("adapter")
457
+
458
+ # --- Health Check Logic ---
459
+ if kind == "HealthCheck":
460
+ print(f"[Worker] Received HealthCheck message {message_id}")
461
+ health_response = {
462
+ "kind": "StreamResponseMessage",
463
+ "id": message_id, # Respond with original stream message ID
464
+ "content": {"status": "healthy", "checked_message_id": msg_id},
465
+ "status": "success",
466
+ "created_at": datetime.now().isoformat(),
467
+ "user_id": user_id,
468
+ }
469
+ if return_stream:
470
+ assert isinstance(return_stream, str)
471
+ r.xadd(return_stream, {"data": json.dumps(health_response)})
472
+ print(f"[Worker] Sent health check response to {return_stream}")
473
+ # Ack handled outside try/except block
474
+ print(f"[Worker] HealthCheck for {message_id} processed successfully.")
475
+ result_content = None # Indicate healthcheck success path
476
+ else:
477
+ # --- Normal Message Processing ---
478
+ if isinstance(content_raw, str):
479
+ try:
480
+ content = json.loads(content_raw)
481
+ except json.JSONDecodeError:
482
+ content = content_raw
483
+ else:
484
+ content = content_raw
485
+ print(f"[Worker] Content: {content}")
486
+
487
+ # --- Construct Input Object ---
488
+ input_obj: Any = None
489
+ input_type_class = None
490
+ try:
491
+ from nebu.processors.models import Message
492
+
493
+ if is_stream_message:
494
+ message_class = Message
495
+ content_model_class = None
496
+ if content_type_name:
497
+ try:
498
+ content_model_class = getattr(
499
+ imported_module, content_type_name, None
500
+ )
501
+ if content_model_class is None:
502
+ content_model_class = local_namespace.get(
503
+ content_type_name
504
+ )
505
+ if content_model_class is None:
506
+ print(
507
+ f"[Worker] Warning: Content type class '{content_type_name}' not found."
508
+ )
509
+ else:
510
+ print(
511
+ f"[Worker] Found content model class: {content_model_class}"
512
+ )
513
+ except Exception as e:
514
+ print(
515
+ f"[Worker] Warning: Error resolving content type class '{content_type_name}': {e}"
516
+ )
517
+
518
+ if content_model_class:
519
+ try:
520
+ content_model = content_model_class.model_validate(
521
+ content
522
+ )
523
+ print(
524
+ f"[Worker] Validated content model: {content_model}"
525
+ )
526
+ input_obj = message_class(
527
+ kind=kind,
528
+ id=msg_id,
529
+ content=content_model,
530
+ created_at=int(created_at.timestamp()),
531
+ return_stream=return_stream,
532
+ user_id=user_id,
533
+ orgs=orgs,
534
+ handle=handle,
535
+ adapter=adapter,
536
+ )
537
+ except Exception as e:
538
+ print(
539
+ f"[Worker] Error validating/creating content model '{content_type_name}': {e}. Falling back."
540
+ )
541
+ input_obj = message_class(
542
+ kind=kind,
543
+ id=msg_id,
544
+ content=cast(Any, content),
545
+ created_at=int(created_at.timestamp()),
546
+ return_stream=return_stream,
547
+ user_id=user_id,
548
+ orgs=orgs,
549
+ handle=handle,
550
+ adapter=adapter,
551
+ )
552
+ else:
553
+ input_obj = message_class(
554
+ kind=kind,
555
+ id=msg_id,
556
+ content=cast(Any, content),
557
+ created_at=int(created_at.timestamp()),
558
+ return_stream=return_stream,
559
+ user_id=user_id,
560
+ orgs=orgs,
561
+ handle=handle,
562
+ adapter=adapter,
563
+ )
564
+ else: # Not a stream message
565
+ param_type_name = param_type_str
566
+ try:
567
+ input_type_class = (
568
+ getattr(imported_module, param_type_name, None)
569
+ if param_type_name
570
+ else None
571
+ )
572
+ if input_type_class is None and param_type_name:
573
+ input_type_class = local_namespace.get(param_type_name)
574
+ if input_type_class is None:
575
+ if param_type_name:
576
+ print(
577
+ f"[Worker] Warning: Input type class '{param_type_name}' not found. Passing raw."
578
+ )
579
+ input_obj = content
580
+ else:
581
+ print(
582
+ f"[Worker] Found input model class: {input_type_class}"
583
+ )
584
+ input_obj = input_type_class.model_validate(content)
585
+ print(f"[Worker] Validated input model: {input_obj}")
586
+ except Exception as e:
587
+ print(
588
+ f"[Worker] Error resolving/validating input type '{param_type_name}': {e}. Passing raw."
589
+ )
590
+ input_obj = content
591
+
592
+ except NameError as e:
593
+ raise RuntimeError(
594
+ f"Required class not found (e.g., Message or param type): {e}"
595
+ ) from e
596
+ except Exception as e:
597
+ print(f"[Worker] Error constructing input object: {e}")
598
+ raise
599
+
600
+ # --- Execute the Function ---
601
+ print(f"[Worker] Executing function '{_function_name}'...")
602
+ result = target_function(input_obj)
603
+ print(f"[Worker] Result: {result}")
604
+
605
+ # --- Convert Result ---
606
+ if hasattr(result, "model_dump"):
607
+ result_content = result.model_dump(mode="json")
608
+ elif hasattr(result, "dict"):
609
+ result_content = result.dict()
610
+ else:
611
+ result_content = result
612
+
613
+ # --- 7. Send Result / Handle Success (outside HealthCheck specific block) ---
614
+ if kind != "HealthCheck": # Only send response for non-healthcheck messages
615
+ response = {
616
+ "kind": "StreamResponseMessage",
617
+ "id": message_id, # Use original stream message ID
618
+ "content": result_content,
619
+ "status": "success",
620
+ "created_at": datetime.now().isoformat(),
621
+ "user_id": user_id,
622
+ }
623
+ if return_stream:
624
+ assert isinstance(return_stream, str)
625
+ r.xadd(return_stream, {"data": json.dumps(response)})
626
+ print(
627
+ f"[Worker] Processed message {message_id}, result sent to {return_stream}"
628
+ )
629
+
630
+ # --- 8. Acknowledge Original Message (on success) ---
631
+ assert isinstance(redis_stream, str)
632
+ assert isinstance(redis_consumer_group, str)
633
+ assert isinstance(
634
+ message_id, str
635
+ ) # message_id is str if processing succeeded
636
+ r.xack(redis_stream, redis_consumer_group, message_id)
637
+ print(f"[Worker] Acknowledged message {message_id} successfully.")
638
+
639
+ # --- 9. Exit Successfully ---
640
+ print("[Worker] Exiting with status 0.")
641
+ sys.exit(0)
642
+
643
+ except Exception as e:
644
+ # --- Handle Processing Error ---
645
+ print(f"[Worker] Error processing message {message_id}: {e}")
646
+ tb = traceback.format_exc()
647
+ print(tb)
648
+ # Assert message_id is str before sending error
649
+ assert isinstance(message_id, str)
650
+ _send_error_response(message_id, str(e), tb, return_stream, user_id)
651
+
652
+ # Acknowledge the message even if processing failed
653
+ try:
654
+ assert isinstance(redis_stream, str)
655
+ assert isinstance(redis_consumer_group, str)
656
+ # message_id is str if processing failed after reading it
657
+ assert isinstance(message_id, str)
658
+ r.xack(redis_stream, redis_consumer_group, message_id)
659
+ print(f"[Worker] Acknowledged failed message {message_id}")
660
+ except Exception as e_ack:
661
+ print(
662
+ f"[Worker] CRITICAL: Failed to acknowledge failed message {message_id}: {e_ack}"
663
+ )
664
+
665
+ # --- 9. Exit with Failure ---
666
+ print("[Worker] Exiting with status 1 due to processing error.")
667
+ sys.exit(1)
668
+
669
+ except Exception as outer_e:
670
+ # --- Handle Catastrophic Worker Error (e.g., setup failure) ---
671
+ print(f"[Worker] FATAL outer error: {outer_e}")
672
+ tb = traceback.format_exc()
673
+ print(tb)
674
+ # If Redis was connected, try to send a generic error for the message_id read from stdin
675
+ # Check that all required variables are not None before proceeding
676
+ if (
677
+ r is not None
678
+ and message_id is not None
679
+ and redis_stream is not None
680
+ and redis_consumer_group is not None
681
+ ):
682
+ try:
683
+ # Assert types explicitly before calls
684
+ assert isinstance(message_id, str)
685
+ assert isinstance(redis_stream, str)
686
+ assert isinstance(redis_consumer_group, str)
687
+
688
+ _send_error_response(
689
+ message_id,
690
+ f"Worker failed during setup or processing: {outer_e}",
691
+ tb,
692
+ message_data.get("return_stream"),
693
+ message_data.get("user_id"),
694
+ )
695
+ # Attempt to ack if possible, even though the main consumer *might* also try
696
+ r.xack(redis_stream, redis_consumer_group, message_id)
697
+ print(
698
+ f"[Worker] Attempted to acknowledge message {message_id} after fatal error."
699
+ )
700
+ except Exception as final_e:
701
+ print(
702
+ f"[Worker] CRITICAL: Failed during final error reporting/ack: {final_e}"
703
+ )
704
+ else:
705
+ print(
706
+ "[Worker] CRITICAL: Could not report final error or ack message due to missing Redis connection or message details."
707
+ )
708
+
709
+ print("[Worker] Exiting with status 1 due to fatal error.")
710
+ sys.exit(1)
@@ -390,6 +390,7 @@ def processor(
390
390
  ports: Optional[List[V1PortRequest]] = None,
391
391
  proxy_port: Optional[int] = None,
392
392
  health_check: Optional[V1ContainerHealthCheck] = None,
393
+ execution_mode: str = "inline", # Added parameter
393
394
  ):
394
395
  def decorator(
395
396
  func: Callable[[Any], Any],
@@ -912,6 +913,14 @@ def processor(
912
913
  print("[DEBUG Decorator] Type validation complete.")
913
914
  # --- End Type Validation ---
914
915
 
916
+ # --- Validate Execution Mode ---
917
+ if execution_mode not in ["inline", "subprocess"]:
918
+ raise ValueError(
919
+ f"Invalid execution_mode: '{execution_mode}'. Must be 'inline' or 'subprocess'."
920
+ )
921
+ print(f"[DEBUG Decorator] Using execution mode: {execution_mode}")
922
+ # --- End Execution Mode Validation ---
923
+
915
924
  # --- Populate Environment Variables ---
916
925
  print("[DEBUG Decorator] Populating environment variables...")
917
926
  # Keep: FUNCTION_NAME, PARAM_TYPE_STR, RETURN_TYPE_STR, IS_STREAM_MESSAGE, CONTENT_TYPE_NAME, MODULE_NAME
@@ -1037,6 +1046,10 @@ def processor(
1037
1046
  )
1038
1047
  print(f"[DEBUG Decorator] Set MODULE_NAME to: {module_path}")
1039
1048
 
1049
+ # Add Execution Mode
1050
+ all_env.append(V1EnvVar(key="NEBU_EXECUTION_MODE", value=execution_mode))
1051
+ print(f"[DEBUG Decorator] Set NEBU_EXECUTION_MODE to: {execution_mode}")
1052
+
1040
1053
  # Add PYTHONPATH
1041
1054
  pythonpath_value = CONTAINER_CODE_DIR
1042
1055
  existing_pythonpath = next(
@@ -5,7 +5,6 @@ from typing import Any, Dict, Generic, List, Optional, TypeVar
5
5
  import requests
6
6
  from pydantic import BaseModel
7
7
 
8
- from nebu.auth import get_user_profile
9
8
  from nebu.config import GlobalConfig
10
9
  from nebu.meta import V1ResourceMetaRequest, V1ResourceReference
11
10
  from nebu.processors.models import (
@@ -128,14 +127,7 @@ class Processor(Generic[InputType, OutputType]):
128
127
  response.raise_for_status()
129
128
 
130
129
  if not namespace:
131
- if not self.api_key:
132
- raise ValueError("No API key provided")
133
-
134
- user_profile = get_user_profile(self.api_key)
135
- namespace = user_profile.handle
136
-
137
- if not namespace:
138
- namespace = user_profile.email.replace("@", "-").replace(".", "-")
130
+ namespace = "-"
139
131
 
140
132
  print(f"Using namespace: {namespace}")
141
133
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: nebu
3
- Version: 0.1.73
3
+ Version: 0.1.76
4
4
  Summary: A globally distributed container runtime
5
5
  Requires-Python: >=3.10.14
6
6
  Description-Content-Type: text/markdown
@@ -13,16 +13,17 @@ nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,681
13
13
  nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
14
14
  nebu/namespaces/models.py,sha256=EqUOpzhVBhvJw2P92ONDUbIgC31M9jMmcaG5vyOrsWg,497
15
15
  nebu/namespaces/namespace.py,sha256=Q_EDH7BgQrTkaDh_l4tbo22qpq-uARfIk8ZPBLjITGY,4967
16
- nebu/processors/consumer.py,sha256=wFxPwLXCrRM8eD4nd6pQsGW46e06rbkoF5YZihodjVk,33857
17
- nebu/processors/decorate.py,sha256=hgDoi3S00VEN0gpKi8MM2HctEdV_XsjqwY8bITLQxCs,53996
16
+ nebu/processors/consumer.py,sha256=Fxgl0cuqKpX3UMsig_aw5KBbg1blE4xUY4yNnIvIHuw,37806
17
+ nebu/processors/consumer_process_worker.py,sha256=l5_BSMfqy-n2yK_UC3sm_pimzelaASeMdPxRE97HFwc,30959
18
+ nebu/processors/decorate.py,sha256=mu1o05BjNcbJ4M1so4Xvt7UbslX--B4dsYLgs5h8bEg,54610
18
19
  nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
19
20
  nebu/processors/models.py,sha256=y40HoW-MEzDWB2dm_tsYlUy3Nf3s6eiLC0iGO9BoNog,3956
20
- nebu/processors/processor.py,sha256=PsLs-Oo0bcvqoDKHErpOaic25y8uvTQ8KxtyFwLptW0,16165
21
+ nebu/processors/processor.py,sha256=OgEK8Fz0ehSe_VFiNsxweVKZIckhgVvQQ11NNffYZqA,15848
21
22
  nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
22
23
  nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
23
24
  nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
- nebu-0.1.73.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
25
- nebu-0.1.73.dist-info/METADATA,sha256=dHdTchUuRy2eGX_PM-DGM2f9E4_rzqhsmhRLOCcs97g,1731
26
- nebu-0.1.73.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
27
- nebu-0.1.73.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
28
- nebu-0.1.73.dist-info/RECORD,,
25
+ nebu-0.1.76.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
26
+ nebu-0.1.76.dist-info/METADATA,sha256=j130KtOVQeqJRkv6xgC6CJ0BUSmqUmb8jFyhKAC11jE,1731
27
+ nebu-0.1.76.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
28
+ nebu-0.1.76.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
29
+ nebu-0.1.76.dist-info/RECORD,,
File without changes