nebu 0.1.73__py3-none-any.whl → 0.1.77__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,12 +3,14 @@ import importlib
 import json
 import os
 import socket
+import subprocess
 import sys
+import threading
 import time
 import traceback
 import types
 from datetime import datetime, timezone
-from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, cast
+from typing import IO, Any, Callable, Dict, List, Optional, Tuple, TypeVar, cast

 import redis
 import socks
@@ -28,6 +30,19 @@ local_namespace: Dict[str, Any] = {} # Namespace for included objects
 last_load_mtime: float = 0.0
 entrypoint_abs_path: Optional[str] = None

+REDIS_CONSUMER_GROUP = os.environ.get("REDIS_CONSUMER_GROUP")
+REDIS_STREAM = os.environ.get("REDIS_STREAM")
+NEBU_EXECUTION_MODE = os.environ.get("NEBU_EXECUTION_MODE", "inline").lower()
+execution_mode = NEBU_EXECUTION_MODE
+
+if execution_mode not in ["inline", "subprocess"]:
+    print(
+        f"Invalid NEBU_EXECUTION_MODE: {NEBU_EXECUTION_MODE}. Must be 'inline' or 'subprocess'. Defaulting to 'inline'."
+    )
+    execution_mode = "inline"
+
+print(f"Execution mode: {execution_mode}")
+

 # --- Function to Load/Reload User Code ---
 def load_or_reload_user_code(
@@ -287,8 +302,6 @@ except Exception as e:

 # Get Redis connection parameters from environment
 REDIS_URL = os.environ.get("REDIS_URL", "")
-REDIS_CONSUMER_GROUP = os.environ.get("REDIS_CONSUMER_GROUP")
-REDIS_STREAM = os.environ.get("REDIS_STREAM")

 if not all([REDIS_URL, REDIS_CONSUMER_GROUP, REDIS_STREAM]):
     print("Missing required Redis environment variables")
@@ -334,15 +347,202 @@ except ResponseError as e:
 def process_message(message_id: str, message_data: Dict[str, str]) -> None:
     # Access the globally managed user code elements
     global target_function, imported_module, local_namespace
+    global execution_mode, r, REDIS_STREAM, REDIS_CONSUMER_GROUP
+
+    # --- Subprocess Execution Path ---
+    if execution_mode == "subprocess":
+        print(f"Processing message {message_id} in subprocess...")
+        process = None  # Initialize process variable
+
+        # Helper function to read and print stream lines
+        def stream_reader(stream: IO[str], prefix: str):
+            try:
+                for line in iter(stream.readline, ""):
+                    print(f"{prefix}: {line.strip()}", flush=True)
+            except Exception as e:
+                print(f"Error reading stream {prefix}: {e}")
+            finally:
+                stream.close()
+
+        try:
+            worker_cmd = [
+                sys.executable,
+                "-u",  # Force unbuffered stdout/stderr in the subprocess
+                "-m",
+                "nebu.processors.consumer_process_worker",
+            ]
+            process_input = json.dumps(
+                {"message_id": message_id, "message_data": message_data}
+            )
+
+            # Start the worker process
+            process = subprocess.Popen(
+                worker_cmd,
+                stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True,
+                encoding="utf-8",
+                bufsize=1,  # Line buffered
+                env=os.environ.copy(),
+            )
+
+            # Create threads to read stdout and stderr concurrently
+            stdout_thread = threading.Thread(
+                target=stream_reader,
+                args=(process.stdout, f"[Subprocess STDOUT {message_id[:8]}]"),
+            )
+            stderr_thread = threading.Thread(
+                target=stream_reader,
+                args=(process.stderr, f"[Subprocess STDERR {message_id[:8]}]"),
+            )
+
+            stdout_thread.start()
+            stderr_thread.start()
+
+            # Send input data to the subprocess
+            # Ensure process and stdin are valid before writing/closing
+            if process and process.stdin:
+                try:
+                    process.stdin.write(process_input)
+                    process.stdin.close()  # Signal end of input
+                except (BrokenPipeError, OSError) as e:
+                    # Handle cases where the process might have exited early
+                    print(
+                        f"Warning: Failed to write full input to subprocess {message_id}: {e}. It might have exited prematurely."
+                    )
+                    # Continue to wait and check return code
+            else:
+                print(
+                    f"Error: Subprocess stdin stream not available for {message_id}. Cannot send input."
+                )
+                # Handle this case - perhaps terminate and report error?
+                # For now, we'll let it proceed to wait() which will likely show an error code.
+
+            # Wait for the process to finish
+            return_code = (
+                process.wait() if process else -1
+            )  # Handle case where process is None
+
+            # Wait for reader threads to finish consuming remaining output
+            stdout_thread.join()
+            stderr_thread.join()
+
+            if return_code == 0:
+                print(
+                    f"Subprocess for {message_id} completed successfully (return code 0)."
+                )
+                # Assume success handling (ack/response) was done by the worker
+            else:
+                print(
+                    f"Subprocess for {message_id} failed with exit code {return_code}."
+                )
+                # Worker likely failed, send generic error and ACK here
+                _send_error_response(
+                    message_id,
+                    f"Subprocess execution failed with exit code {return_code}",
+                    "See consumer logs for subprocess stderr.",  # stderr was already printed
+                    message_data.get("return_stream"),
+                    message_data.get("user_id"),
+                )
+                # CRITICAL: Acknowledge the message here since the subprocess failed
+                try:
+                    assert isinstance(REDIS_STREAM, str)
+                    assert isinstance(REDIS_CONSUMER_GROUP, str)
+                    r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+                    print(f"Acknowledged failed subprocess message {message_id}")
+                except Exception as e_ack:
+                    print(
+                        f"CRITICAL: Failed to acknowledge failed subprocess message {message_id}: {e_ack}"
+                    )
+
+        except FileNotFoundError:
+            print(
+                "FATAL: Worker script 'nebu.processors.consumer_process_worker' not found. Check PYTHONPATH."
+            )
+            # Send error and ack if possible
+            _send_error_response(
+                message_id,
+                "Worker script not found",
+                traceback.format_exc(),
+                message_data.get("return_stream"),
+                message_data.get("user_id"),
+            )
+            try:
+                assert isinstance(REDIS_STREAM, str)
+                assert isinstance(REDIS_CONSUMER_GROUP, str)
+                r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+                print(
+                    f"Acknowledged message {message_id} after worker script not found failure"
+                )
+            except Exception as e_ack:
+                print(
+                    f"CRITICAL: Failed to acknowledge message {message_id} after worker script not found failure: {e_ack}"
+                )
+
+        except Exception as e:
+            print(
+                f"Error launching or managing subprocess for message {message_id}: {e}"
+            )
+            traceback.print_exc()
+            # Also send an error and acknowledge
+            _send_error_response(
+                message_id,
+                f"Failed to launch/manage subprocess: {e}",
+                traceback.format_exc(),
+                message_data.get("return_stream"),
+                message_data.get("user_id"),
+            )
+            try:
+                assert isinstance(REDIS_STREAM, str)
+                assert isinstance(REDIS_CONSUMER_GROUP, str)
+                r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+                print(
+                    f"Acknowledged message {message_id} after subprocess launch/manage failure"
+                )
+            except Exception as e_ack:
+                print(
+                    f"CRITICAL: Failed to acknowledge message {message_id} after subprocess launch/manage failure: {e_ack}"
+                )
+            # Ensure process is terminated if it's still running after an error
+            if process and process.poll() is None:
+                print(
+                    f"Terminating potentially lingering subprocess for {message_id}..."
+                )
+                process.terminate()
+                process.wait(timeout=5)  # Give it a moment to terminate
+                if process.poll() is None:
+                    print(
+                        f"Subprocess for {message_id} did not terminate gracefully, killing."
+                    )
+                    process.kill()
+        finally:
+            # Ensure streams are closed even if threads failed or process is None
+            if process:
+                if process.stdout:
+                    try:
+                        process.stdout.close()
+                    except Exception:
+                        pass  # Ignore errors during cleanup close
+                if process.stderr:
+                    try:
+                        process.stderr.close()
+                    except Exception:
+                        pass  # Ignore errors during cleanup close
+                # Stdin should already be closed, but doesn't hurt to be safe
+                if process.stdin and not process.stdin.closed:
+                    try:
+                        process.stdin.close()
+                    except Exception:
+                        pass
+
+        return  # Exit process_message after handling subprocess logic

-    # Check if target_function is loaded (might be None if reload failed)
+    # --- Inline Execution Path (Original Logic) ---
     if target_function is None or imported_module is None:
         print(
             f"Error processing message {message_id}: User code (target_function or module) is not loaded. Skipping."
         )
-        # Decide how to handle this - skip and ack? Send error?
-        # Sending error for now, but not acking yet.
-        # This requires the main loop to handle potential ack failure later if needed.
         _send_error_response(
             message_id,
             "User code is not loaded (likely due to a failed reload)",
@@ -562,7 +762,7 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
         # print(f"Input object: {input_obj}")  # Reduce verbosity

         # Execute the function
-        print(f"Executing function...")
+        print("Executing function...")
         result = target_function(input_obj)
         print(f"Result: {result}")  # Reduce verbosity

@@ -0,0 +1,710 @@
+#!/usr/bin/env python3
+import importlib
+import json
+import os
+import socket
+import sys
+
+# import time # Removed unused import
+import traceback
+import types
+from datetime import datetime, timezone
+from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, cast
+
+import redis
+import socks
+
+# from redis import ConnectionError, ResponseError # Removed unused imports
+
+# Define TypeVar for generic models
+T = TypeVar("T")
+
+# Environment variable name used as a guard in the decorator
+_NEBU_INSIDE_CONSUMER_ENV_VAR = "_NEBU_INSIDE_CONSUMER_EXEC"
+
+
+# --- Global variables for dynamically loaded code (in this process) ---
+target_function: Optional[Callable] = None
+init_function: Optional[Callable] = None
+imported_module: Optional[types.ModuleType] = None
+local_namespace: Dict[str, Any] = {}  # Namespace for included objects
+last_load_mtime: float = (
+    0.0  # Note: This worker doesn't auto-reload code. It loads once.
+)
+entrypoint_abs_path: Optional[str] = None
+
+
+# --- Function to Load User Code (Copied from consumer.py, no reload needed) ---
+def load_user_code(
+    module_path: str,
+    function_name: str,
+    entrypoint_abs_path: str,
+    init_func_name: Optional[str] = None,
+    included_object_sources: Optional[List[Tuple[str, List[str]]]] = None,
+) -> Tuple[
+    Optional[Callable],
+    Optional[Callable],
+    Optional[types.ModuleType],
+    Dict[str, Any],
+]:
+    """Loads the user code module, executes includes, and returns functions/module."""
+    global _NEBU_INSIDE_CONSUMER_ENV_VAR  # Access the global guard var name
+
+    loaded_target_func = None
+    loaded_init_func = None
+    loaded_module = None
+    exec_namespace: Dict[str, Any] = {}  # Use a local namespace for this load attempt
+
+    print(f"[Worker Code Loader] Attempting to load module: '{module_path}'")
+    os.environ[_NEBU_INSIDE_CONSUMER_ENV_VAR] = "1"  # Set guard *before* import
+    print(
+        f"[Worker Code Loader] Set environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}=1"
+    )
+
+    try:
+        # Execute included object sources FIRST (if any)
+        if included_object_sources:
+            print("[Worker Code Loader] Executing @include object sources...")
+            # Include necessary imports for the exec context
+            exec("from pydantic import BaseModel, Field", exec_namespace)
+            exec(
+                "from typing import Optional, List, Dict, Any, Generic, TypeVar",
+                exec_namespace,
+            )
+            exec("T_exec = TypeVar('T_exec')", exec_namespace)
+            exec("from nebu.processors.models import *", exec_namespace)
+            # ... add other common imports if needed by included objects ...
+
+            for i, (obj_source, args_sources) in enumerate(included_object_sources):
+                try:
+                    exec(obj_source, exec_namespace)
+                    print(
+                        f"[Worker Code Loader] Successfully executed included object {i} base source"
+                    )
+                    for j, arg_source in enumerate(args_sources):
+                        try:
+                            exec(arg_source, exec_namespace)
+                            print(
+                                f"[Worker Code Loader] Successfully executed included object {i} arg {j} source"
+                            )
+                        except Exception as e_arg:
+                            print(
+                                f"Error executing included object {i} arg {j} source: {e_arg}"
+                            )
+                            traceback.print_exc()
+                except Exception as e_base:
+                    print(f"Error executing included object {i} base source: {e_base}")
+                    traceback.print_exc()
+            print("[Worker Code Loader] Finished executing included object sources.")
+
+        # Import the main module (no reload needed in worker)
+        loaded_module = importlib.import_module(module_path)
+        print(f"[Worker Code Loader] Successfully imported module: {module_path}")
+
+        # Get the target function from the loaded module
+        loaded_target_func = getattr(loaded_module, function_name)
+        print(
+            f"[Worker Code Loader] Successfully loaded function '{function_name}' from module '{module_path}'"
+        )
+
+        # Get the init function if specified
+        if init_func_name:
+            loaded_init_func = getattr(loaded_module, init_func_name)
+            print(
+                f"[Worker Code Loader] Successfully loaded init function '{init_func_name}' from module '{module_path}'"
+            )
+            # Execute init_func
+            print(f"[Worker Code Loader] Executing init_func: {init_func_name}...")
+            loaded_init_func()  # Call the function
+            print(
+                f"[Worker Code Loader] Successfully executed init_func: {init_func_name}"
+            )
+
+        print("[Worker Code Loader] Code load successful.")
+        return (
+            loaded_target_func,
+            loaded_init_func,
+            loaded_module,
+            exec_namespace,
+        )
+
+    except FileNotFoundError:
+        print(
+            f"[Worker Code Loader] Error: Entrypoint file not found at '{entrypoint_abs_path}'. Cannot load."
+        )
+        return None, None, None, {}  # Indicate failure
+    except ImportError as e:
+        print(f"[Worker Code Loader] Error importing module '{module_path}': {e}")
+        traceback.print_exc()
+        return None, None, None, {}  # Indicate failure
+    except AttributeError as e:
+        print(
+            f"[Worker Code Loader] Error accessing function '{function_name}' or '{init_func_name}' in module '{module_path}': {e}"
+        )
+        traceback.print_exc()
+        return None, None, None, {}  # Indicate failure
+    except Exception as e:
+        print(f"[Worker Code Loader] Unexpected error during code load: {e}")
+        traceback.print_exc()
+        return None, None, None, {}  # Indicate failure
+    finally:
+        # Unset the guard environment variable
+        os.environ.pop(_NEBU_INSIDE_CONSUMER_ENV_VAR, None)
+        print(
+            f"[Worker Code Loader] Unset environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}"
+        )
+
+
+# --- Helper to Send Error Response (Copied from consumer.py) ---
+# Note: 'r' and 'REDIS_STREAM' will be global in this worker's context
+def _send_error_response(
+    message_id: str,
+    error_msg: str,
+    tb: str,
+    return_stream: Optional[str],
+    user_id: Optional[str],
+):
+    """Sends a standardized error response to Redis."""
+    global r, redis_stream  # Use lowercase redis_stream
+
+    # Check if Redis connection exists before trying to use it
+    if r is None:
+        print(
+            "[Worker] CRITICAL: Cannot send error response, Redis connection is not available."
+        )
+        return
+    # Assert REDIS_STREAM type here for safety, although it should be set if r is available
+    if not isinstance(redis_stream, str):
+        print(
+            "[Worker] CRITICAL: Cannot send error response, REDIS_STREAM is not a valid string."
+        )
+        return
+
+    error_response = {
+        "kind": "StreamResponseMessage",
+        "id": message_id,
+        "content": {
+            "error": error_msg,
+            "traceback": tb,
+        },
+        "status": "error",
+        "created_at": datetime.now(timezone.utc).isoformat(),  # Use UTC
+        "user_id": user_id,
+    }
+
+    error_destination = f"{redis_stream}.errors"  # Default error stream
+    if return_stream:  # Prefer return_stream if available
+        error_destination = return_stream
+
+    try:
+        assert isinstance(error_destination, str)
+        r.xadd(error_destination, {"data": json.dumps(error_response)})
+        print(
+            f"[Worker] Sent error response for message {message_id} to {error_destination}"
+        )
+    except Exception as e_redis:
+        print(
+            f"[Worker] CRITICAL: Failed to send error response for {message_id} to Redis: {e_redis}"
+        )
+        traceback.print_exc()
+
+
+# --- Main Worker Logic ---
+if __name__ == "__main__":
+    print("[Worker] Starting subprocess worker...")
+    r: Optional[redis.Redis] = None  # Initialize Redis connection variable
+    # Initialize potentially unbound variables
+    message_id: Optional[str] = None
+    message_data: Dict[str, Any] = {}
+    # Use lowercase variables for mutable values from env
+    redis_stream: Optional[str] = None
+    redis_consumer_group: Optional[str] = None
+
+    try:
+        # --- 1. Read Input from Stdin ---
+        print("[Worker] Reading message data from stdin...")
+        input_data_str = sys.stdin.read()
+        if not input_data_str:
+            print("[Worker] FATAL: No input data received from stdin.")
+            sys.exit(1)
+
+        try:
+            input_data = json.loads(input_data_str)
+            message_id = input_data["message_id"]
+            message_data = input_data["message_data"]
+            print(f"[Worker] Received message_id: {message_id}")
+        except (json.JSONDecodeError, KeyError) as e:
+            print(f"[Worker] FATAL: Failed to parse input JSON from stdin: {e}")
+            # Cannot easily send error response without message_id/Redis info
+            sys.exit(1)
+
+        # --- 2. Read Configuration from Environment ---
+        print("[Worker] Reading configuration from environment variables...")
+        try:
+            # Core function info
+            _function_name = os.environ.get("FUNCTION_NAME")
+            _entrypoint_rel_path = os.environ.get("NEBU_ENTRYPOINT_MODULE_PATH")
+
+            # Type info
+            is_stream_message = os.environ.get("IS_STREAM_MESSAGE") == "True"
+            param_type_str = os.environ.get("PARAM_TYPE_STR")
+            return_type_str = os.environ.get("RETURN_TYPE_STR")
+            content_type_name = os.environ.get("CONTENT_TYPE_NAME")
+
+            # Init func info
+            _init_func_name = os.environ.get("INIT_FUNC_NAME")
+
+            # Included object sources
+            _included_object_sources = []
+            i = 0
+            while True:
+                obj_source = os.environ.get(f"INCLUDED_OBJECT_{i}_SOURCE")
+                if obj_source:
+                    args = []
+                    j = 0
+                    while True:
+                        arg_source = os.environ.get(
+                            f"INCLUDED_OBJECT_{i}_ARG_{j}_SOURCE"
+                        )
+                        if arg_source:
+                            args.append(arg_source)
+                            j += 1
+                        else:
+                            break
+                    _included_object_sources.append((obj_source, args))
+                    i += 1
+                else:
+                    break
+
+            if not _function_name or not _entrypoint_rel_path:
+                raise ValueError(
+                    "FUNCTION_NAME or NEBU_ENTRYPOINT_MODULE_PATH environment variables not set"
+                )
+
+            # Redis info
+            REDIS_URL = os.environ.get("REDIS_URL", "")
+            # Read into temporary uppercase vars first
+            _redis_consumer_group_env = os.environ.get("REDIS_CONSUMER_GROUP")
+            _redis_stream_env = os.environ.get("REDIS_STREAM")
+            # Assign to lowercase mutable vars
+            redis_consumer_group = _redis_consumer_group_env
+            redis_stream = _redis_stream_env
+
+            if not all([REDIS_URL, redis_consumer_group, redis_stream]):
+                raise ValueError("Missing required Redis environment variables")
+
+            # Calculate absolute path
+            entrypoint_abs_path = os.path.abspath(_entrypoint_rel_path)
+            if not os.path.exists(entrypoint_abs_path):
+                python_path = os.environ.get("PYTHONPATH", "").split(os.pathsep)
+                found_path = False
+                for p_path in python_path:
+                    potential_path = os.path.abspath(
+                        os.path.join(p_path, _entrypoint_rel_path)
+                    )
+                    if os.path.exists(potential_path):
+                        entrypoint_abs_path = potential_path
+                        found_path = True
+                        print(
+                            f"[Worker] Found entrypoint absolute path via PYTHONPATH: {entrypoint_abs_path}"
+                        )
+                        break
+                if not found_path:
+                    raise ValueError(
+                        f"Could not find entrypoint file via relative path '{_entrypoint_rel_path}' or in PYTHONPATH."
+                    )
+
+            # Convert entrypoint file path to module path
+            _module_path = _entrypoint_rel_path.replace(os.sep, ".")
+            if _module_path.endswith(".py"):
+                _module_path = _module_path[:-3]
+            if _module_path.endswith(".__init__"):
+                _module_path = _module_path[: -len(".__init__")]
+            elif _module_path == "__init__":
+                raise ValueError(
+                    f"Entrypoint '{_entrypoint_rel_path}' resolves to ambiguous top-level __init__."
+                )
+            if not _module_path:
+                raise ValueError(
+                    f"Could not derive a valid module path from entrypoint '{_entrypoint_rel_path}'"
+                )
+
+            print(
+                f"[Worker] Config loaded. Module: '{_module_path}', Function: '{_function_name}'"
+            )
+
+        except ValueError as e:
+            print(f"[Worker] FATAL: Configuration error: {e}")
+            # Cannot send error response without Redis connection
+            sys.exit(1)
+        except Exception as e:
+            print(f"[Worker] FATAL: Unexpected error reading environment: {e}")
+            traceback.print_exc()
+            sys.exit(1)
+
+        # --- 3. Set up SOCKS Proxy ---
+        print("[Worker] Configuring SOCKS proxy...")
+        try:
+            socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
+            socket.socket = socks.socksocket
+            print(
+                "[Worker] Configured SOCKS5 proxy for socket connections via localhost:1055"
+            )
+        except Exception as e:
+            print(f"[Worker] FATAL: Failed to configure SOCKS proxy: {e}")
+            traceback.print_exc()
+            sys.exit(1)
+
+        # --- 4. Connect to Redis ---
+        print("[Worker] Connecting to Redis...")
+        try:
+            r = redis.from_url(REDIS_URL, decode_responses=True)
+            r.ping()
+            redis_info = REDIS_URL.split("@")[-1] if "@" in REDIS_URL else REDIS_URL
+            print(f"[Worker] Connected to Redis via SOCKS proxy at {redis_info}")
+        except Exception as e:
+            print(f"[Worker] FATAL: Failed to connect to Redis: {e}")
+            traceback.print_exc()
+            sys.exit(1)  # Cannot proceed without Redis
+
+        # --- 5. Load User Code ---
+        print("[Worker] Loading user code...")
+        try:
+            (
+                target_function,
+                init_function,
+                imported_module,
+                local_namespace,
+            ) = load_user_code(
+                _module_path,
+                _function_name,
+                entrypoint_abs_path,
+                _init_func_name,
+                _included_object_sources,
+            )
+
+            if target_function is None or imported_module is None:
+                # load_user_code prints errors, just need to exit
+                raise RuntimeError("User code loading failed.")
+            print("[Worker] User code loaded successfully.")
+
+        except Exception as e:
+            print(f"[Worker] Error during user code load: {e}")
+            traceback.print_exc()
+            # Send error response via Redis before exiting
+            # Assert message_id is str before sending error
+            assert isinstance(message_id, str)
+            _send_error_response(
+                message_id,
+                f"User code load failed: {e}",
+                traceback.format_exc(),
+                message_data.get("return_stream"),
+                message_data.get("user_id"),
+            )
+            # Acknowledge the message to prevent reprocessing a load failure
+            try:
+                assert isinstance(redis_stream, str)
+                assert isinstance(redis_consumer_group, str)
+                # message_id should be str here if code load failed after reading it
+                assert isinstance(message_id, str)
+                r.xack(redis_stream, redis_consumer_group, message_id)
+                print(
+                    f"[Worker] Acknowledged message {message_id} after code load failure."
+                )
+            except Exception as e_ack:
+                print(
+                    f"[Worker] CRITICAL: Failed to acknowledge message {message_id} after code load failure: {e_ack}"
+                )
+            sys.exit(1)  # Exit after attempting to report failure
+
+        # --- 6. Execute Processing Logic (Adapted from consumer.py inline path) ---
+        print(f"[Worker] Processing message {message_id}...")
+        return_stream = None
+        user_id = None
+        try:
+            payload_str = message_data.get("data")
+            if not payload_str:
+                raise ValueError("Missing or invalid 'data' field")
+            try:
+                raw_payload = json.loads(payload_str)
+            except json.JSONDecodeError as json_err:
+                raise ValueError(
+                    f"Failed to parse JSON payload: {json_err}"
+                ) from json_err
+            if not isinstance(raw_payload, dict):
+                raise TypeError(
+                    f"Expected parsed payload dictionary, got {type(raw_payload)}"
+                )
+
+            kind = raw_payload.get("kind", "")
+            msg_id = raw_payload.get("id", "")  # ID from within the payload
+            content_raw = raw_payload.get("content", {})
+            created_at_str = raw_payload.get("created_at")
+            try:
+                created_at = (
+                    datetime.fromisoformat(created_at_str)
+                    if created_at_str and isinstance(created_at_str, str)
+                    else datetime.now(timezone.utc)
+                )
+            except ValueError:
+                created_at = datetime.now(timezone.utc)
+
+            return_stream = raw_payload.get("return_stream")
+            user_id = raw_payload.get("user_id")
+            orgs = raw_payload.get("organizations")
+            handle = raw_payload.get("handle")
+            adapter = raw_payload.get("adapter")
+
+            # --- Health Check Logic ---
+            if kind == "HealthCheck":
+                print(f"[Worker] Received HealthCheck message {message_id}")
+                health_response = {
+                    "kind": "StreamResponseMessage",
+                    "id": message_id,  # Respond with original stream message ID
+                    "content": {"status": "healthy", "checked_message_id": msg_id},
+                    "status": "success",
+                    "created_at": datetime.now().isoformat(),
+                    "user_id": user_id,
+                }
+                if return_stream:
+                    assert isinstance(return_stream, str)
+                    r.xadd(return_stream, {"data": json.dumps(health_response)})
+                    print(f"[Worker] Sent health check response to {return_stream}")
+                # Ack handled outside try/except block
+                print(f"[Worker] HealthCheck for {message_id} processed successfully.")
+                result_content = None  # Indicate healthcheck success path
+            else:
+                # --- Normal Message Processing ---
+                if isinstance(content_raw, str):
+                    try:
+                        content = json.loads(content_raw)
+                    except json.JSONDecodeError:
+                        content = content_raw
+                else:
+                    content = content_raw
+                print(f"[Worker] Content: {content}")
+
+                # --- Construct Input Object ---
+                input_obj: Any = None
+                input_type_class = None
+                try:
+                    from nebu.processors.models import Message
+
+                    if is_stream_message:
+                        message_class = Message
+                        content_model_class = None
+                        if content_type_name:
+                            try:
+                                content_model_class = getattr(
+                                    imported_module, content_type_name, None
+                                )
+                                if content_model_class is None:
+                                    content_model_class = local_namespace.get(
+                                        content_type_name
+                                    )
+                                if content_model_class is None:
+                                    print(
+                                        f"[Worker] Warning: Content type class '{content_type_name}' not found."
+                                    )
+                                else:
+                                    print(
+                                        f"[Worker] Found content model class: {content_model_class}"
+                                    )
+                            except Exception as e:
+                                print(
+                                    f"[Worker] Warning: Error resolving content type class '{content_type_name}': {e}"
+                                )
+
+                        if content_model_class:
+                            try:
+                                content_model = content_model_class.model_validate(
+                                    content
+                                )
+                                print(
+                                    f"[Worker] Validated content model: {content_model}"
+                                )
+                                input_obj = message_class(
+                                    kind=kind,
+                                    id=msg_id,
+                                    content=content_model,
+                                    created_at=int(created_at.timestamp()),
+                                    return_stream=return_stream,
+                                    user_id=user_id,
+                                    orgs=orgs,
+                                    handle=handle,
+                                    adapter=adapter,
+                                )
+                            except Exception as e:
+                                print(
+                                    f"[Worker] Error validating/creating content model '{content_type_name}': {e}. Falling back."
+                                )
+                                input_obj = message_class(
+                                    kind=kind,
+                                    id=msg_id,
+                                    content=cast(Any, content),
+                                    created_at=int(created_at.timestamp()),
+                                    return_stream=return_stream,
+                                    user_id=user_id,
+                                    orgs=orgs,
+                                    handle=handle,
+                                    adapter=adapter,
+                                )
+                        else:
+                            input_obj = message_class(
+                                kind=kind,
+                                id=msg_id,
+                                content=cast(Any, content),
+                                created_at=int(created_at.timestamp()),
+                                return_stream=return_stream,
+                                user_id=user_id,
+                                orgs=orgs,
+                                handle=handle,
+                                adapter=adapter,
+                            )
+                    else:  # Not a stream message
+                        param_type_name = param_type_str
+                        try:
+                            input_type_class = (
+                                getattr(imported_module, param_type_name, None)
+                                if param_type_name
+                                else None
+                            )
+                            if input_type_class is None and param_type_name:
+                                input_type_class = local_namespace.get(param_type_name)
+                            if input_type_class is None:
+                                if param_type_name:
+                                    print(
+                                        f"[Worker] Warning: Input type class '{param_type_name}' not found. Passing raw."
+                                    )
+                                input_obj = content
+                            else:
+                                print(
+                                    f"[Worker] Found input model class: {input_type_class}"
+                                )
+                                input_obj = input_type_class.model_validate(content)
+                                print(f"[Worker] Validated input model: {input_obj}")
+                        except Exception as e:
+                            print(
+                                f"[Worker] Error resolving/validating input type '{param_type_name}': {e}. Passing raw."
+                            )
+                            input_obj = content
+
+                except NameError as e:
+                    raise RuntimeError(
+                        f"Required class not found (e.g., Message or param type): {e}"
+                    ) from e
+                except Exception as e:
+                    print(f"[Worker] Error constructing input object: {e}")
+                    raise
+
+                # --- Execute the Function ---
+                print(f"[Worker] Executing function '{_function_name}'...")
+                result = target_function(input_obj)
+                print(f"[Worker] Result: {result}")
+
+                # --- Convert Result ---
+                if hasattr(result, "model_dump"):
+                    result_content = result.model_dump(mode="json")
+                elif hasattr(result, "dict"):
+                    result_content = result.dict()
+                else:
+                    result_content = result
+
+            # --- 7. Send Result / Handle Success (outside HealthCheck specific block) ---
+            if kind != "HealthCheck":  # Only send response for non-healthcheck messages
+                response = {
+                    "kind": "StreamResponseMessage",
+                    "id": message_id,  # Use original stream message ID
+                    "content": result_content,
+                    "status": "success",
+                    "created_at": datetime.now().isoformat(),
+                    "user_id": user_id,
+                }
+                if return_stream:
+                    assert isinstance(return_stream, str)
+                    r.xadd(return_stream, {"data": json.dumps(response)})
+                    print(
+                        f"[Worker] Processed message {message_id}, result sent to {return_stream}"
+                    )
+
+            # --- 8. Acknowledge Original Message (on success) ---
+            assert isinstance(redis_stream, str)
+            assert isinstance(redis_consumer_group, str)
+            assert isinstance(
+                message_id, str
+            )  # message_id is str if processing succeeded
+            r.xack(redis_stream, redis_consumer_group, message_id)
+            print(f"[Worker] Acknowledged message {message_id} successfully.")
+
+            # --- 9. Exit Successfully ---
+            print("[Worker] Exiting with status 0.")
+            sys.exit(0)
+
+        except Exception as e:
+            # --- Handle Processing Error ---
+            print(f"[Worker] Error processing message {message_id}: {e}")
+            tb = traceback.format_exc()
+            print(tb)
+            # Assert message_id is str before sending error
+            assert isinstance(message_id, str)
+            _send_error_response(message_id, str(e), tb, return_stream, user_id)
+
+            # Acknowledge the message even if processing failed
+            try:
+                assert isinstance(redis_stream, str)
+                assert isinstance(redis_consumer_group, str)
+                # message_id is str if processing failed after reading it
+                assert isinstance(message_id, str)
+                r.xack(redis_stream, redis_consumer_group, message_id)
+                print(f"[Worker] Acknowledged failed message {message_id}")
+            except Exception as e_ack:
+                print(
+                    f"[Worker] CRITICAL: Failed to acknowledge failed message {message_id}: {e_ack}"
+                )
+
+            # --- 9. Exit with Failure ---
+            print("[Worker] Exiting with status 1 due to processing error.")
+            sys.exit(1)
+
+    except Exception as outer_e:
+        # --- Handle Catastrophic Worker Error (e.g., setup failure) ---
+        print(f"[Worker] FATAL outer error: {outer_e}")
+        tb = traceback.format_exc()
+        print(tb)
+        # If Redis was connected, try to send a generic error for the message_id read from stdin
+        # Check that all required variables are not None before proceeding
+        if (
+            r is not None
+            and message_id is not None
+            and redis_stream is not None
+            and redis_consumer_group is not None
+        ):
+            try:
+                # Assert types explicitly before calls
+                assert isinstance(message_id, str)
+                assert isinstance(redis_stream, str)
+                assert isinstance(redis_consumer_group, str)
+
+                _send_error_response(
+                    message_id,
+                    f"Worker failed during setup or processing: {outer_e}",
+                    tb,
+                    message_data.get("return_stream"),
+                    message_data.get("user_id"),
+                )
+                # Attempt to ack if possible, even though the main consumer *might* also try
+                r.xack(redis_stream, redis_consumer_group, message_id)
+                print(
+                    f"[Worker] Attempted to acknowledge message {message_id} after fatal error."
+                )
+            except Exception as final_e:
+                print(
+                    f"[Worker] CRITICAL: Failed during final error reporting/ack: {final_e}"
+                )
+        else:
+            print(
+                "[Worker] CRITICAL: Could not report final error or ack message due to missing Redis connection or message details."
+            )
+
+        print("[Worker] Exiting with status 1 due to fatal error.")
+        sys.exit(1)
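
For orientation, the worker added above is driven entirely over stdin: the consumer serializes the stream message as JSON, pipes it in, and treats the worker's exit code as the processing outcome. A minimal sketch of that contract follows; the module name, the "-u"/"-m" flags, and the {"message_id", "message_data"} keys come from this diff, while the sample values and the idea of invoking the worker by hand are purely illustrative (a real run also needs the REDIS_* and FUNCTION_NAME environment variables the worker reads).

    # Illustrative sketch only: drive the worker the same way consumer.py does.
    import json
    import subprocess
    import sys

    payload = json.dumps(
        {"message_id": "1700000000000-0", "message_data": {"data": "{}"}}
    )
    proc = subprocess.run(
        [sys.executable, "-u", "-m", "nebu.processors.consumer_process_worker"],
        input=payload,  # the worker reads this JSON from stdin
        text=True,
        capture_output=True,
    )
    print(proc.returncode)  # 0 on success, 1 on any worker-side failure
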
@@ -390,6 +390,7 @@ def processor(
     ports: Optional[List[V1PortRequest]] = None,
     proxy_port: Optional[int] = None,
     health_check: Optional[V1ContainerHealthCheck] = None,
+    execution_mode: str = "inline",  # Added parameter
 ):
     def decorator(
         func: Callable[[Any], Any],
@@ -912,6 +913,14 @@ def processor(
         print("[DEBUG Decorator] Type validation complete.")
         # --- End Type Validation ---

+        # --- Validate Execution Mode ---
+        if execution_mode not in ["inline", "subprocess"]:
+            raise ValueError(
+                f"Invalid execution_mode: '{execution_mode}'. Must be 'inline' or 'subprocess'."
+            )
+        print(f"[DEBUG Decorator] Using execution mode: {execution_mode}")
+        # --- End Execution Mode Validation ---
+
         # --- Populate Environment Variables ---
         print("[DEBUG Decorator] Populating environment variables...")
         # Keep: FUNCTION_NAME, PARAM_TYPE_STR, RETURN_TYPE_STR, IS_STREAM_MESSAGE, CONTENT_TYPE_NAME, MODULE_NAME
@@ -1037,6 +1046,10 @@ def processor(
         )
         print(f"[DEBUG Decorator] Set MODULE_NAME to: {module_path}")

+        # Add Execution Mode
+        all_env.append(V1EnvVar(key="NEBU_EXECUTION_MODE", value=execution_mode))
+        print(f"[DEBUG Decorator] Set NEBU_EXECUTION_MODE to: {execution_mode}")
+
         # Add PYTHONPATH
         pythonpath_value = CONTAINER_CODE_DIR
         existing_pythonpath = next(
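
The decorator changes above surface the new mode to user code as an execution_mode argument, validated against the same two values and forwarded to the container as NEBU_EXECUTION_MODE. A minimal usage sketch follows; only the parameter name and its accepted values ("inline", "subprocess") come from this diff, and the import path plus the omission of the decorator's other arguments (image, stream settings, and so on) are assumptions about a typical call.

    # Hypothetical usage of the parameter added in 0.1.77.
    from nebu.processors.decorate import processor

    @processor(execution_mode="subprocess")  # default stays "inline"
    def handle(message):
        # each message is handed to a fresh worker process instead of
        # being executed inline in the consumer
        return {"echo": getattr(message, "content", None)}
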
@@ -5,7 +5,6 @@ from typing import Any, Dict, Generic, List, Optional, TypeVar
 import requests
 from pydantic import BaseModel

-from nebu.auth import get_user_profile
 from nebu.config import GlobalConfig
 from nebu.meta import V1ResourceMetaRequest, V1ResourceReference
 from nebu.processors.models import (
@@ -128,14 +127,7 @@ class Processor(Generic[InputType, OutputType]):
         response.raise_for_status()

         if not namespace:
-            if not self.api_key:
-                raise ValueError("No API key provided")
-
-            user_profile = get_user_profile(self.api_key)
-            namespace = user_profile.handle
-
-            if not namespace:
-                namespace = user_profile.email.replace("@", "-").replace(".", "-")
+            namespace = "-"

         print(f"Using namespace: {namespace}")

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.73
+Version: 0.1.77
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
@@ -13,16 +13,17 @@ nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,681
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
 nebu/namespaces/models.py,sha256=EqUOpzhVBhvJw2P92ONDUbIgC31M9jMmcaG5vyOrsWg,497
 nebu/namespaces/namespace.py,sha256=Q_EDH7BgQrTkaDh_l4tbo22qpq-uARfIk8ZPBLjITGY,4967
-nebu/processors/consumer.py,sha256=wFxPwLXCrRM8eD4nd6pQsGW46e06rbkoF5YZihodjVk,33857
-nebu/processors/decorate.py,sha256=hgDoi3S00VEN0gpKi8MM2HctEdV_XsjqwY8bITLQxCs,53996
+nebu/processors/consumer.py,sha256=N1olarPEHHqisxuM6gkar_LHG9CUbGhGvrIz-tmsPT4,42267
+nebu/processors/consumer_process_worker.py,sha256=l5_BSMfqy-n2yK_UC3sm_pimzelaASeMdPxRE97HFwc,30959
+nebu/processors/decorate.py,sha256=mu1o05BjNcbJ4M1so4Xvt7UbslX--B4dsYLgs5h8bEg,54610
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=y40HoW-MEzDWB2dm_tsYlUy3Nf3s6eiLC0iGO9BoNog,3956
-nebu/processors/processor.py,sha256=PsLs-Oo0bcvqoDKHErpOaic25y8uvTQ8KxtyFwLptW0,16165
+nebu/processors/processor.py,sha256=OgEK8Fz0ehSe_VFiNsxweVKZIckhgVvQQ11NNffYZqA,15848
 nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.73.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.73.dist-info/METADATA,sha256=dHdTchUuRy2eGX_PM-DGM2f9E4_rzqhsmhRLOCcs97g,1731
-nebu-0.1.73.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
-nebu-0.1.73.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.73.dist-info/RECORD,,
+nebu-0.1.77.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.77.dist-info/METADATA,sha256=6zdRYrPRNGO6NnN30EV26EVc5drWDvJKJbAhirgj2sQ,1731
+nebu-0.1.77.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
+nebu-0.1.77.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.77.dist-info/RECORD,,