nebu 0.1.88__py3-none-any.whl → 0.1.93__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
 import ast # For parsing notebook code
 import inspect
 import os
-import re # Added import
+import re
 import textwrap
 from typing import (
     Any,
@@ -36,6 +36,7 @@ from nebu.containers.models import (
     V1VolumePath,
 )
 from nebu.data import Bucket
+from nebu.logging import logger
 from nebu.meta import V1ResourceMetaRequest
 from nebu.processors.models import (
     Message,
@@ -78,19 +79,19 @@ def is_jupyter_notebook():

         ip = get_ipython() # Use the imported function
         if ip is None: # type: ignore
-            # print("[DEBUG Helper] is_jupyter_notebook: No IPython instance found.")
+            # logger.debug("is_jupyter_notebook: No IPython instance found.")
             return False
         class_name = str(ip.__class__)
-        # print(f"[DEBUG Helper] is_jupyter_notebook: IPython class name: {class_name}")
+        # logger.debug(f"is_jupyter_notebook: IPython class name: {class_name}")
         if "ZMQInteractiveShell" in class_name:
-            # print("[DEBUG Helper] is_jupyter_notebook: Jupyter detected (ZMQInteractiveShell).")
+            # logger.debug("is_jupyter_notebook: Jupyter detected (ZMQInteractiveShell).")
             return True
-        # print("[DEBUG Helper] is_jupyter_notebook: Not Jupyter (IPython instance found, but not ZMQInteractiveShell).")
+        # logger.debug("is_jupyter_notebook: Not Jupyter (IPython instance found, but not ZMQInteractiveShell).")
         return False
     except Exception as e:
-        print(
-            f"[DEBUG Helper] is_jupyter_notebook: Exception occurred: {e}"
-        ) # Reduce verbosity
+        logger.debug(
+            f"is_jupyter_notebook: Exception occurred: {e}"
+        ) # Keep as debug for less noise
         return False


@@ -99,35 +100,35 @@ def get_notebook_executed_code():
     Returns all executed code from the current notebook session.
     Returns str or None: All executed code as a string, or None if not possible.
     """
-    print("[DEBUG Helper] Attempting to get notebook execution history...")
+    logger.debug("Attempting to get notebook execution history...")
     try:
         # Fix: Import get_ipython directly
         from IPython.core.getipython import get_ipython

         ip = get_ipython() # Use the imported function
         if ip is None or not hasattr(ip, "history_manager"):
-            print(
-                "[DEBUG Helper] get_notebook_executed_code: No IPython instance or history_manager."
+            logger.debug(
+                "get_notebook_executed_code: No IPython instance or history_manager."
             )
             return None
         history_manager = ip.history_manager
         # Limiting history range for debugging? Maybe get_tail(N)? For now, get all.
         # history = history_manager.get_range(start=1) # type: ignore
         history = list(history_manager.get_range(start=1)) # type: ignore # Convert to list to get length
-        print(
-            f"[DEBUG Helper] get_notebook_executed_code: Retrieved {len(history)} history entries."
+        logger.debug(
+            f"get_notebook_executed_code: Retrieved {len(history)} history entries."
         )
         source_code = ""
         separator = "\n#<NEBU_CELL_SEP>#\n"
         for _, _, content in history: # Use _ for unused session, lineno
             if isinstance(content, str) and content.strip():
                 source_code += content + separator
-        print(
-            f"[DEBUG Helper] get_notebook_executed_code: Total history source length: {len(source_code)}"
+        logger.debug(
+            f"get_notebook_executed_code: Total history source length: {len(source_code)}"
         )
         return source_code
     except Exception as e:
-        print(f"[DEBUG Helper] get_notebook_executed_code: Error getting history: {e}")
+        logger.error(f"get_notebook_executed_code: Error getting history: {e}")
         return None


@@ -140,15 +141,15 @@ def extract_definition_source_from_string(
     Uses AST parsing for robustness.
     def_type can be ast.FunctionDef or ast.ClassDef.
     """
-    print(
-        f"[DEBUG Helper] Extracting '{def_name}' ({def_type.__name__}) from history string (len: {len(source_string)})..."
+    logger.debug(
+        f"Extracting '{def_name}' ({def_type.__name__}) from history string (len: {len(source_string)})..."
     )
     if not source_string or not def_name:
-        print("[DEBUG Helper] extract: Empty source string or def_name.")
+        logger.debug("extract: Empty source string or def_name.")
         return None

     cells = source_string.split("#<NEBU_CELL_SEP>#")
-    print(f"[DEBUG Helper] extract: Split history into {len(cells)} potential cells.")
+    logger.debug(f"extract: Split history into {len(cells)} potential cells.")
     last_found_source = None

     for i, cell in enumerate(reversed(cells)):
@@ -156,7 +157,7 @@ def extract_definition_source_from_string(
         cell = cell.strip()
         if not cell:
             continue
-        # print(f"[DEBUG Helper] extract: Analyzing cell #{cell_num}...") # Can be very verbose
+        # logger.debug(f"extract: Analyzing cell #{cell_num}...") # Can be very verbose
         try:
             tree = ast.parse(cell)
             found_in_cell = False
@@ -167,22 +168,22 @@ def extract_definition_source_from_string(
                     ) # Check if it's the right type (FuncDef or ClassDef)
                     and getattr(node, "name", None) == def_name # Safely check name
                 ):
-                    print(
-                        f"[DEBUG Helper] extract: Found node for '{def_name}' in cell #{cell_num}."
+                    logger.debug(
+                        f"extract: Found node for '{def_name}' in cell #{cell_num}."
                     )
                     try:
                         # Use ast.get_source_segment for accurate extraction (Python 3.8+)
                         func_source = ast.get_source_segment(cell, node)
                         if func_source:
-                            print(
-                                f"[DEBUG Helper] extract: Successfully extracted source using get_source_segment for '{def_name}'."
+                            logger.debug(
+                                f"extract: Successfully extracted source using get_source_segment for '{def_name}'."
                             )
                             last_found_source = func_source
                             found_in_cell = True
                             break # Stop searching this cell
                     except AttributeError: # Fallback for Python < 3.8
-                        print(
-                            f"[DEBUG Helper] extract: get_source_segment failed (likely Py < 3.8), using fallback for '{def_name}'."
+                        logger.debug(
+                            f"extract: get_source_segment failed (likely Py < 3.8), using fallback for '{def_name}'."
                         )
                         start_lineno = getattr(node, "lineno", 1) - 1
                         end_lineno = getattr(node, "end_lineno", start_lineno + 1)
@@ -210,34 +211,34 @@ def extract_definition_source_from_string(
                             .startswith(("def ", "class "))
                         ):
                             last_found_source = "\n".join(extracted_lines)
-                            print(
-                                f"[DEBUG Helper] extract: Extracted source via fallback for '{def_name}'."
+                            logger.debug(
+                                f"extract: Extracted source via fallback for '{def_name}'."
                             )
                             found_in_cell = True
                             break
                         else:
-                            print(
-                                f"[DEBUG Helper] extract: Warning: Line numbers out of bounds for {def_name} in cell (fallback)."
+                            logger.warning(
+                                f"extract: Line numbers out of bounds for {def_name} in cell (fallback)."
                             )

             if found_in_cell:
-                print(
-                    f"[DEBUG Helper] extract: Found and returning source for '{def_name}' from cell #{cell_num}."
+                logger.debug(
+                    f"extract: Found and returning source for '{def_name}' from cell #{cell_num}."
                 )
                 return last_found_source # Found last definition, return immediately

         except (SyntaxError, ValueError) as e:
-            # print(f"[DEBUG Helper] extract: Skipping cell #{cell_num} due to parse error: {e}") # Can be verbose
+            # logger.debug(f"extract: Skipping cell #{cell_num} due to parse error: {e}") # Can be verbose
             continue
         except Exception as e:
-            print(
-                f"[DEBUG Helper] extract: Warning: AST processing error for {def_name} in cell #{cell_num}: {e}"
+            logger.warning(
+                f"extract: AST processing error for {def_name} in cell #{cell_num}: {e}"
             )
             continue

     if not last_found_source:
-        print(
-            f"[DEBUG Helper] extract: Definition '{def_name}' of type {def_type.__name__} not found in history search."
+        logger.debug(
+            f"extract: Definition '{def_name}' of type {def_type.__name__} not found in history search."
         )
     return last_found_source

@@ -257,13 +258,13 @@ def include(obj: Any) -> Any:
         source = dill.source.getsource(obj)
         dedented_source = textwrap.dedent(source)
         setattr(obj, _NEBU_EXPLICIT_SOURCE_ATTR, dedented_source)
-        print(
-            f"[DEBUG @include] Successfully captured source for: {getattr(obj, '__name__', str(obj))}"
+        logger.debug(
+            f"@include: Successfully captured source for: {getattr(obj, '__name__', str(obj))}"
         )
     except Exception as e:
         # Don't fail the definition, just warn
-        print(
-            f"Warning: @include could not capture source for {getattr(obj, '__name__', str(obj))}: {e}. Automatic source retrieval will be attempted later."
+        logger.warning(
+            f"@include could not capture source for {getattr(obj, '__name__', str(obj))}: {e}. Automatic source retrieval will be attempted later."
         )
     return obj

@@ -276,44 +277,44 @@ def get_model_source(
     Checks explicit source, then notebook history (if provided), then dill.
     """
     model_name_str = getattr(model_class, "__name__", str(model_class))
-    print(f"[DEBUG get_model_source] Getting source for: {model_name_str}")
+    logger.debug(f"get_model_source: Getting source for: {model_name_str}")
     # 1. Check explicit source
     explicit_source = getattr(model_class, _NEBU_EXPLICIT_SOURCE_ATTR, None)
     if explicit_source:
-        print(
-            f"[DEBUG get_model_source] Using explicit source (@include) for: {model_name_str}"
+        logger.debug(
+            f"get_model_source: Using explicit source (@include) for: {model_name_str}"
         )
         return explicit_source

     # 2. Check notebook history
     if notebook_code and hasattr(model_class, "__name__"):
-        print(
-            f"[DEBUG get_model_source] Attempting notebook history extraction for: {model_class.__name__}"
+        logger.debug(
+            f"get_model_source: Attempting notebook history extraction for: {model_class.__name__}"
         )
         extracted_source = extract_definition_source_from_string(
             notebook_code, model_class.__name__, ast.ClassDef
         )
         if extracted_source:
-            print(
-                f"[DEBUG get_model_source] Using notebook history source for: {model_class.__name__}"
+            logger.debug(
+                f"get_model_source: Using notebook history source for: {model_class.__name__}"
             )
             return extracted_source
         else:
-            print(
-                f"[DEBUG get_model_source] Notebook history extraction failed for: {model_class.__name__}. Proceeding to dill."
+            logger.debug(
+                f"get_model_source: Notebook history extraction failed for: {model_class.__name__}. Proceeding to dill."
             )

     # 3. Fallback to dill
     try:
-        print(
-            f"[DEBUG get_model_source] Attempting dill fallback for: {model_name_str}"
+        logger.debug(
+            f"get_model_source: Attempting dill fallback for: {model_name_str}"
         )
         source = dill.source.getsource(model_class)
-        print(f"[DEBUG get_model_source] Using dill source for: {model_name_str}")
+        logger.debug(f"get_model_source: Using dill source for: {model_name_str}")
         return textwrap.dedent(source)
     except (IOError, TypeError, OSError) as e:
-        print(
-            f"[DEBUG get_model_source] Failed dill fallback for: {model_name_str}: {e}"
+        logger.debug(
+            f"get_model_source: Failed dill fallback for: {model_name_str}: {e}"
         )
         return None

@@ -324,22 +325,22 @@ def get_type_source(
 ) -> Optional[Any]:
     """Get the source code for a type, including generic parameters."""
     type_obj_str = str(type_obj)
-    print(f"[DEBUG get_type_source] Getting source for type: {type_obj_str}")
+    logger.debug(f"get_type_source: Getting source for type: {type_obj_str}")
     origin = get_origin(type_obj)
     args = get_args(type_obj)

     if origin is not None:
         # Use updated get_model_source for origin
-        print(
-            f"[DEBUG get_type_source] Detected generic type. Origin: {origin}, Args: {args}"
+        logger.debug(
+            f"get_type_source: Detected generic type. Origin: {origin}, Args: {args}"
         )
         origin_source = get_model_source(origin, notebook_code)
         args_sources = []

         # Recursively get sources for all type arguments
         for arg in args:
-            print(
-                f"[DEBUG get_type_source] Recursively getting source for generic arg #{arg}"
+            logger.debug(
+                f"get_type_source: Recursively getting source for generic arg #{arg}"
             )
             arg_source = get_type_source(arg, notebook_code)
             if arg_source:
@@ -347,8 +348,8 @@ def get_type_source(

         # Return tuple only if origin source or some arg sources were found
         if origin_source or args_sources:
-            print(
-                f"[DEBUG get_type_source] Returning tuple source for generic: {type_obj_str}"
+            logger.debug(
+                f"get_type_source: Returning tuple source for generic: {type_obj_str}"
             )
             return (origin_source, args_sources)

@@ -356,12 +357,12 @@ def get_type_source(
     # Try get_model_source as a last resort for unknown types
     fallback_source = get_model_source(type_obj, notebook_code)
     if fallback_source:
-        print(
-            f"[DEBUG get_type_source] Using fallback get_model_source for: {type_obj_str}"
+        logger.debug(
+            f"get_type_source: Using fallback get_model_source for: {type_obj_str}"
         )
         return fallback_source

-    print(f"[DEBUG get_type_source] Failed to get source for: {type_obj_str}")
+    logger.debug(f"get_type_source: Failed to get source for: {type_obj_str}")
     return None


@@ -392,20 +393,22 @@ def processor(
     health_check: Optional[V1ContainerHealthCheck] = None,
     execution_mode: str = "inline",
     config: Optional[GlobalConfig] = None,
+    hot_reload: bool = True,
+    debug: bool = False,
 ):
     def decorator(
         func: Callable[[Any], Any],
     ) -> Processor:
         # --- Prevent Recursion Guard ---
         if os.environ.get(_NEBU_INSIDE_CONSUMER_ENV_VAR) == "1":
-            print(
-                f"[DEBUG Decorator] Guard triggered for '{func.__name__}'. Returning original function."
+            logger.debug(
+                f"Decorator Guard triggered for '{func.__name__}'. Returning original function."
             )
             return func # type: ignore
         # --- End Guard ---

-        print(
-            f"[DEBUG Decorator Init] @processor decorating function '{func.__name__}'"
+        logger.debug(
+            f"Decorator Init: @processor decorating function '{func.__name__}'"
         )
         all_env = env or []
         processor_name = func.__name__
@@ -415,7 +418,7 @@ def processor(
         effective_config = config

         # --- Get Decorated Function File Path and Directory ---
-        print("[DEBUG Decorator] Getting source file path for decorated function...")
+        logger.debug("Decorator: Getting source file path for decorated function...")
         func_file_path: Optional[str] = None
         func_dir: Optional[str] = None
         rel_func_path: Optional[str] = None # Relative path within func_dir
@@ -426,9 +429,9 @@ def processor(
             func_dir = os.path.dirname(func_file_path)
             # Calculate relative path based on the resolved directory
             rel_func_path = os.path.relpath(func_file_path, func_dir)
-            print(f"[DEBUG Decorator] Found real file path: {func_file_path}")
-            print(f"[DEBUG Decorator] Found function directory: {func_dir}")
-            print(f"[DEBUG Decorator] Relative function path: {rel_func_path}")
+            logger.debug(f"Decorator: Found real file path: {func_file_path}")
+            logger.debug(f"Decorator: Found function directory: {func_dir}")
+            logger.debug(f"Decorator: Relative function path: {rel_func_path}")
         except (TypeError, OSError) as e:
             # TypeError can happen if func is not a module, class, method, function, traceback, frame, or code object
             raise ValueError(
@@ -447,7 +450,7 @@ def processor(
                 "Could not determine function directory or relative path for S3 upload."
             )
         # --- Get API Key ---
-        print("[DEBUG Decorator] Loading Nebu configuration...")
+        logger.debug("Decorator: Loading Nebu configuration...")
         try:
             if not effective_config:
                 effective_config = GlobalConfig.read()
@@ -455,7 +458,7 @@ def processor(
             if not current_server or not current_server.api_key:
                 raise ValueError("Nebu server configuration or API key not found.")
             api_key = current_server.api_key
-            print("[DEBUG Decorator] Nebu API key loaded successfully.")
+            logger.debug("Decorator: Nebu API key loaded successfully.")

             # # Add additional environment variables from current configuration
             # all_env.append(V1EnvVar(key="AGENTSEA_API_KEY", value=api_key))
@@ -480,10 +483,10 @@ def processor(
             # if orign_server:
             #     all_env.append(V1EnvVar(key="ORIGN_SERVER", value=orign_server))
             # else:
-            #     print("[DEBUG Decorator] No Orign server found. Not setting...")
+            #     logger.debug("Decorator: No Orign server found. Not setting...")

         except Exception as e:
-            print(f"ERROR: Failed to load Nebu configuration or API key: {e}")
+            logger.error(f"Failed to load Nebu configuration or API key: {e}")
             raise RuntimeError(
                 f"Failed to load Nebu configuration or API key: {e}"
             ) from e
@@ -492,19 +495,19 @@ def processor(
         # --- Determine Namespace ---
         effective_namespace = namespace # Start with the provided namespace
         if effective_namespace is None:
-            print("[DEBUG Decorator] Namespace not provided, fetching user profile...")
+            logger.debug("Decorator: Namespace not provided, fetching user profile...")
             try:
                 user_profile = get_user_profile(api_key)
                 if user_profile.handle:
                     effective_namespace = user_profile.handle
-                    print(
-                        f"[DEBUG Decorator] Using user handle '{effective_namespace}' as namespace."
+                    logger.debug(
+                        f"Decorator: Using user handle '{effective_namespace}' as namespace."
                     )
                 else:
                     raise ValueError("User profile does not contain a handle.")
             except Exception as e:
-                print(
-                    f"ERROR: Failed to get user profile or handle for default namespace: {e}"
+                logger.error(
+                    f"Failed to get user profile or handle for default namespace: {e}"
                 )
                 raise RuntimeError(
                     f"Failed to get user profile or handle for default namespace: {e}"
@@ -513,7 +516,7 @@ def processor(

         # Use processor_name instead of name
         S3_TOKEN_ENDPOINT = f"{NEBU_API_BASE_URL}/v1/auth/temp-s3-tokens/{effective_namespace}/{processor_name}"
-        print(f"[DEBUG Decorator] Fetching S3 token from: {S3_TOKEN_ENDPOINT}")
+        logger.debug(f"Decorator: Fetching S3 token from: {S3_TOKEN_ENDPOINT}")
         try:
             headers = {"Authorization": f"Bearer {api_key}"} # Add headers here

@@ -522,7 +525,7 @@ def processor(
             response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
             s3_token_data = response.json()

-            print(f"[DEBUG Decorator] S3 token data: {s3_token_data}")
+            logger.debug(f"Decorator: S3 token data: {s3_token_data}")

             aws_access_key_id = s3_token_data.get("access_key_id")
             aws_secret_access_key = s3_token_data.get("secret_access_key")
@@ -558,8 +561,8 @@ def processor(
             ) # Ensure trailing slash for prefix
             s3_destination_uri = f"s3://{parsed_base.netloc}/{s3_destination_key}"

-            print(
-                f"[DEBUG Decorator] Uploading code from '{func_dir}' to '{s3_destination_uri}'"
+            logger.debug(
+                f"Decorator: Uploading code from '{func_dir}' to '{s3_destination_uri}'"
             )

             # Instantiate Bucket with temporary credentials
@@ -583,48 +586,46 @@ def processor(
                 delete=True,
                 dry_run=False,
             )
-            print("[DEBUG Decorator] S3 code upload completed.")
+            logger.debug("Decorator: S3 code upload completed.")

         except requests.exceptions.RequestException as e:
-            print(f"ERROR: Failed to fetch S3 token from {S3_TOKEN_ENDPOINT}: {e}")
+            logger.error(f"Failed to fetch S3 token from {S3_TOKEN_ENDPOINT}: {e}")
             raise RuntimeError(
                 f"Failed to fetch S3 token from {S3_TOKEN_ENDPOINT}: {e}"
             ) from e
         except ClientError as e:
-            print(f"ERROR: Failed to upload code to S3 {s3_destination_uri}: {e}")
+            logger.error(f"Failed to upload code to S3 {s3_destination_uri}: {e}")
             # Attempt to provide more context from the error if possible
             error_code = e.response.get("Error", {}).get("Code")
             error_msg = e.response.get("Error", {}).get("Message")
-            print(f" S3 Error Code: {error_code}, Message: {error_msg}")
+            logger.error(f" S3 Error Code: {error_code}, Message: {error_msg}")
             raise RuntimeError(
                 f"Failed to upload code to {s3_destination_uri}: {e}"
             ) from e
         except ValueError as e: # Catch ValueErrors from validation
-            print(f"ERROR: Configuration or response data error: {e}")
+            logger.error(f"Configuration or response data error: {e}")
             raise RuntimeError(f"Configuration or response data error: {e}") from e
         except Exception as e:
-            print(f"ERROR: Unexpected error during S3 token fetch or upload: {e}")
+            logger.exception(f"Unexpected error during S3 token fetch or upload: {e}")
             # Consider logging traceback here for better debugging
             import traceback

-            traceback.print_exc()
+            traceback.print_exc() # Keep this explicit traceback for now in case logging isn't configured yet
             raise RuntimeError(f"Unexpected error during S3 setup: {e}") from e

         # --- Process Manually Included Objects (Keep for now, add source via env) ---
-        # This part remains unchanged for now, using @include and environment variables.
-        # Future: Could potentially upload these to S3 as well if they become large.
         included_sources: Dict[Any, Any] = {}
         notebook_code_for_include = None # Get notebook code only if needed for include
         if include:
             # Determine if we are in Jupyter only if needed for include fallback
-            # print("[DEBUG Decorator] Processing manually included objects...")
+            # logger.debug("Decorator: Processing manually included objects...")
             is_jupyter_env = is_jupyter_notebook()
             if is_jupyter_env:
                 notebook_code_for_include = get_notebook_executed_code()

             for i, obj in enumerate(include):
                 obj_name_str = getattr(obj, "__name__", str(obj))
-                # print(f"[DEBUG Decorator] Getting source for manually included object: {obj_name_str}")
+                # logger.debug(f"Decorator: Getting source for manually included object: {obj_name_str}")
                 # Pass notebook code only if available and needed by get_model_source
                 obj_source = get_model_source(
                     obj, notebook_code_for_include if is_jupyter_env else None
@@ -633,27 +634,29 @@ def processor(
                     included_sources[obj] = obj_source
                     # Decide how to pass included source - keep using Env Vars for now
                     env_key_base = f"INCLUDED_OBJECT_{i}"
-                    if isinstance(obj_source, str): # type: ignore[arg-type]
+                    # Correct check for string type - Linter might complain but it's safer
+                    if isinstance(obj_source, str): # type: ignore
                         all_env.append(
                             V1EnvVar(key=f"{env_key_base}_SOURCE", value=obj_source)
                         )
-                        # print(f"[DEBUG Decorator] Added string source to env for included obj: {obj_name_str}")
+                        # logger.debug(f"Decorator: Added string source to env for included obj: {obj_name_str}")
                     elif isinstance(obj_source, tuple):
                         # Handle tuple source (origin, args) - assumes get_model_source/get_type_source logic
                         # Ensure obj_source is indeed a tuple before unpacking
                         if len(obj_source) == 2:
                             # Now safe to unpack
-                            origin_src, arg_srcs = obj_source # type: ignore[misc] # Suppress persistent tuple unpacking error
-                            # type: ignore[misc] # Suppress persistent tuple unpacking error
+                            origin_src, arg_srcs = obj_source # type: ignore
                             if origin_src and isinstance(origin_src, str):
                                 all_env.append(
                                     V1EnvVar(
                                         key=f"{env_key_base}_SOURCE", value=origin_src
                                     )
                                 )
-                            # Handle arg_srcs (this part seems okay)
+                            # Handle arg_srcs - ensure it's iterable (list)
+                            # Linter complains about "Never" not iterable, check type explicitly
                             if isinstance(arg_srcs, list):
                                 for j, arg_src in enumerate(arg_srcs):
+                                    # Ensure arg_src is string before adding
                                     if isinstance(arg_src, str):
                                         all_env.append(
                                             V1EnvVar(
@@ -662,29 +665,27 @@ def processor(
                                             )
                                         )
                             else:
-                                print(
-                                    f"[DEBUG Decorator] Warning: Expected arg_srcs to be a list, got {type(arg_srcs)}"
+                                logger.warning(
+                                    f"Decorator: Expected arg_srcs to be a list, got {type(arg_srcs)}"
                                 )
                         else:
                             # Handle unexpected type or structure for obj_source if necessary
-                            # For now, assume it fits the expected tuple structure if isinstance passes
-                            # origin_src, arg_srcs = None, [] # Default/error state (already covered by outer check)
-                            print(
-                                f"[DEBUG Decorator] Warning: Unexpected obj_source structure: {obj_source}"
+                            logger.warning(
+                                f"Decorator: Unexpected obj_source structure: {obj_source}"
                             )
                     else:
-                        print(
-                            f"Warning: Unknown source type for included object {obj_name_str}: {type(obj_source)}"
+                        logger.warning(
+                            f"Unknown source type for included object {obj_name_str}: {type(obj_source)}"
                         )
                 else:
-                    print(
-                        f"Warning: Could not retrieve source for manually included object: {obj_name_str}. It might not be available in the consumer."
+                    logger.warning(
+                        f"Could not retrieve source for manually included object: {obj_name_str}. It might not be available in the consumer."
                     )
         # --- End Manually Included Objects ---

         # --- Validate Function Signature and Types (Keep as is) ---
-        print(
-            f"[DEBUG Decorator] Validating signature and type hints for {processor_name}..."
+        logger.debug(
+            f"Decorator: Validating signature and type hints for {processor_name}..."
         )
         sig = inspect.signature(func)
         params = list(sig.parameters.values())
@@ -696,17 +697,17 @@ def processor(
         try:
             # Attempt to resolve type hints
             type_hints = get_type_hints(func, globalns=func.__globals__, localns=None)
-            print(f"[DEBUG Decorator] Resolved type hints: {type_hints}")
+            logger.debug(f"Decorator: Resolved type hints: {type_hints}")
         except NameError as e:
             # Specific handling for NameError (common in notebooks/dynamic environments)
-            print(
-                f"Warning: Could not fully resolve type hints for {processor_name} due to NameError: {e}. Type validation might be incomplete."
+            logger.warning(
+                f"Could not fully resolve type hints for {processor_name} due to NameError: {e}. Type validation might be incomplete."
             )
             # Try to get raw annotations as fallback?
             type_hints = getattr(func, "__annotations__", {})
-            print(f"[DEBUG Decorator] Using raw annotations as fallback: {type_hints}")
+            logger.debug(f"Decorator: Using raw annotations as fallback: {type_hints}")
         except Exception as e:
-            print(f"[DEBUG Decorator] Error getting type hints: {e}")
+            logger.error(f"Decorator: Error getting type hints: {e}")
             # Potentially re-raise or handle based on severity
             raise TypeError(
                 f"Could not evaluate type hints for {processor_name}: {e}. Ensure all type dependencies are defined or imported."
@@ -722,22 +723,22 @@ def processor(
             param_name
         ) # Use .get for safety with raw annotations fallback
         param_type_str_repr = str(param_type) # Use string representation
-        print(
-            f"[DEBUG Decorator] Parameter '{param_name}' type hint: {param_type_str_repr}"
+        logger.debug(
+            f"Decorator: Parameter '{param_name}' type hint: {param_type_str_repr}"
         )

         return_type = type_hints.get("return")
         return_type_str_repr = str(return_type)
-        print(f"[DEBUG Decorator] Return type hint: {return_type_str_repr}")
+        logger.debug(f"Decorator: Return type hint: {return_type_str_repr}")

         # --- Determine Input Type (StreamMessage, ContentType) ---
         # This logic remains mostly the same, using the resolved types
-        print(
-            f"[DEBUG Decorator] Determining input type structure for param type hint: {param_type_str_repr}"
+        logger.debug(
+            f"Decorator: Determining input type structure for param type hint: {param_type_str_repr}"
         )
         origin = get_origin(param_type) if param_type else None
         args = get_args(param_type) if param_type else tuple()
-        print(f"[DEBUG Decorator] get_origin result: {origin}, get_args result: {args}")
+        logger.debug(f"Decorator: get_origin result: {origin}, get_args result: {args}")
         is_stream_message = False
         content_type = None
         content_type_name_from_regex = None # Store regex result here
@@ -749,96 +750,96 @@ def processor(
         if origin is message_cls or (
             isinstance(origin, type) and origin is message_cls
         ):
-            print(
-                "[DEBUG Decorator] Input type identified as Message via get_origin/isinstance."
+            logger.debug(
+                "Decorator: Input type identified as Message via get_origin/isinstance."
             )
             is_stream_message = True
             if args:
                 content_type = args[0]
-                print(
-                    f"[DEBUG Decorator] Content type extracted via get_args: {content_type}"
+                logger.debug(
+                    f"Decorator: Content type extracted via get_args: {content_type}"
                 )
             else:
-                print(
-                    "[DEBUG Decorator] Message detected, but no generic arguments found via get_args. Attempting regex fallback on string repr."
+                logger.debug(
+                    "Decorator: Message detected, but no generic arguments found via get_args. Attempting regex fallback on string repr."
                 )
                 # --- Regex Fallback Start ---
                 match = re.search(r"Message\[([\w\.]+)\]", param_type_str_repr)
                 if match:
                     content_type_name_from_regex = match.group(1)
-                    print(
-                        f"[DEBUG Decorator] Extracted content type name via regex: {content_type_name_from_regex}"
+                    logger.debug(
+                        f"Decorator: Extracted content type name via regex: {content_type_name_from_regex}"
                     )
                 else:
-                    print(
-                        "[DEBUG Decorator] Regex fallback failed to extract content type name."
+                    logger.debug(
+                        "Decorator: Regex fallback failed to extract content type name."
                     )
                 # --- Regex Fallback End ---
         # Check 2a: Regex fallback if get_origin failed but string matches pattern
         elif origin is None and param_type is not None:
-            print(
-                "[DEBUG Decorator] get_origin failed. Attempting regex fallback on string representation."
+            logger.debug(
+                "Decorator: get_origin failed. Attempting regex fallback on string representation."
             )
             match = re.search(r"Message\[([\w\.]+)\]", param_type_str_repr)
             if match:
-                print(
-                    "[DEBUG Decorator] Regex fallback successful after get_origin failed."
+                logger.debug(
+                    "Decorator: Regex fallback successful after get_origin failed."
                 )
                 is_stream_message = True
                 content_type_name_from_regex = match.group(1)
                 # We don't have the actual content_type object here, only the name
                 content_type = None
-                print(
-                    f"[DEBUG Decorator] Extracted content type name via regex: {content_type_name_from_regex}"
+                logger.debug(
+                    f"Decorator: Extracted content type name via regex: {content_type_name_from_regex}"
                 )
             else:
-                print(
-                    "[DEBUG Decorator] Regex fallback also failed. Treating as non-Message type."
+                logger.debug(
+                    "Decorator: Regex fallback also failed. Treating as non-Message type."
                 )
                 is_stream_message = False
                 content_type = None
         # Check 2: Direct type check (Handles cases where get_origin might fail but type is correct)
         elif isinstance(param_type, type) and param_type is message_cls:
             # This case likely won't have generic args accessible easily if get_origin failed
-            print(
-                "[DEBUG Decorator] Input type identified as direct Message type. Attempting regex fallback."
+            logger.debug(
+                "Decorator: Input type identified as direct Message type. Attempting regex fallback."
             )
             is_stream_message = True
             # --- Regex Fallback Start ---
             match = re.search(r"Message\[([\w\.]+)\]", param_type_str_repr)
             if match:
                 content_type_name_from_regex = match.group(1)
-                print(
-                    f"[DEBUG Decorator] Extracted content type name via regex: {content_type_name_from_regex}"
+                logger.debug(
+                    f"Decorator: Extracted content type name via regex: {content_type_name_from_regex}"
                 )
             else:
-                print(
-                    "[DEBUG Decorator] Regex fallback failed to extract content type name."
+                logger.debug(
+                    "Decorator: Regex fallback failed to extract content type name."
                 )
             # --- Regex Fallback End ---
         # Check 3: Removed old placeholder elif branch

         else: # Handle cases where param_type might be None or origin is something else
-            print(
-                f"[DEBUG Decorator] Input parameter '{param_name}' type ({param_type_str_repr}) identified as non-Message type."
+            logger.debug(
+                f"Decorator: Input parameter '{param_name}' type ({param_type_str_repr}) identified as non-Message type."
             )

-        print(
-            f"[DEBUG Decorator] Final Input Type Determination: is_stream_message={is_stream_message}, content_type={content_type}"
+        logger.debug(
+            f"Decorator: Final Input Type Determination: is_stream_message={is_stream_message}, content_type={content_type}"
         )
         # --- End Input Type Determination ---

         # --- Validate Types are BaseModel ---
-        print(
-            "[DEBUG Decorator] Validating parameter and return types are BaseModel subclasses..."
+        logger.debug(
+            "Decorator: Validating parameter and return types are BaseModel subclasses..."
         )

         # Define check_basemodel locally or ensure it's available
         def check_basemodel(type_to_check: Optional[Any], desc: str):
-            # print(f"[DEBUG Decorator] check_basemodel: Checking {desc} - Type: {type_to_check}") # Verbose
+            # logger.debug(f"Decorator check_basemodel: Checking {desc} - Type: {type_to_check}") # Verbose
             if type_to_check is None or type_to_check is Any:
-                print(
-                    f"[DEBUG Decorator] check_basemodel: Skipping check for {desc} (type is None or Any)."
+                logger.debug(
+                    f"Decorator check_basemodel: Skipping check for {desc} (type is None or Any)."
                 )
                 return
             # Handle Optional[T] by getting the inner type
@@ -852,11 +853,11 @@ def processor(
                 non_none_args = [arg for arg in type_args if arg is not type(None)]
                 if len(non_none_args) == 1:
                     actual_type = non_none_args[0]
-                    # print(f"[DEBUG Decorator] check_basemodel: Unwrapped Optional/Union to {actual_type} for {desc}")
+                    # logger.debug(f"Decorator check_basemodel: Unwrapped Optional/Union to {actual_type} for {desc}")
                 else:
                     # Handle complex Unions later if needed, skip check for now
-                    print(
-                        f"[DEBUG Decorator] check_basemodel: Skipping check for complex Union {desc}: {type_to_check}"
+                    logger.debug(
+                        f"Decorator check_basemodel: Skipping check for complex Union {desc}: {type_to_check}"
                     )
                     return

@@ -864,7 +865,7 @@ def processor(
             effective_type = (
                 get_origin(actual_type) or actual_type
             ) # Handle generics like List[Model]
-            # print(f"[DEBUG Decorator] check_basemodel: Effective type for {desc}: {effective_type}") # Verbose
+            # logger.debug(f"Decorator check_basemodel: Effective type for {desc}: {effective_type}") # Verbose
             if isinstance(effective_type, type) and not issubclass(
                 effective_type, BaseModel
             ):
@@ -879,26 +880,26 @@ def processor(
                     type(None),
                 )
                 if effective_type not in allowed_non_model_types:
-                    print(
-                        f"[DEBUG Decorator] check_basemodel: Error - {desc} effective type ({effective_type.__name__}) is not BaseModel or standard type."
+                    logger.error(
+                        f"Decorator check_basemodel: Error - {desc} effective type ({effective_type.__name__}) is not BaseModel or standard type."
                     )
                     raise TypeError(
                         f"{desc} effective type ({effective_type.__name__}) must be BaseModel subclass or standard type (str, int, etc.)"
                     )
                 else:
-                    print(
-                        f"[DEBUG Decorator] check_basemodel: OK - {desc} is standard type {effective_type.__name__}."
+                    logger.debug(
+                        f"Decorator check_basemodel: OK - {desc} is standard type {effective_type.__name__}."
                    )

             elif not isinstance(effective_type, type):
                 # Allow TypeVars or other constructs for now? Or enforce BaseModel? Enforce for now.
-                print(
-                    f"[DEBUG Decorator] check_basemodel: Warning - {desc} effective type '{effective_type}' is not a class. Cannot verify BaseModel subclass."
+                logger.warning(
+                    f"Decorator check_basemodel: Warning - {desc} effective type '{effective_type}' is not a class. Cannot verify BaseModel subclass."
                 )
                 # Revisit this if TypeVars bound to BaseModel are needed.
             else:
-                print(
-                    f"[DEBUG Decorator] check_basemodel: OK - {desc} effective type ({effective_type.__name__}) is a BaseModel subclass."
+                logger.debug(
+                    f"Decorator check_basemodel: OK - {desc} effective type ({effective_type.__name__}) is a BaseModel subclass."
                 )

         effective_param_type = (
@@ -912,7 +913,7 @@ def processor(
         if effective_param_type is not message_cls:
             check_basemodel(effective_param_type, f"Parameter '{param_name}'")
         check_basemodel(return_type, "Return value")
-        print("[DEBUG Decorator] Type validation complete.")
+        logger.debug("Decorator: Type validation complete.")
         # --- End Type Validation ---

         # --- Validate Execution Mode ---
@@ -920,11 +921,11 @@ def processor(
             raise ValueError(
                 f"Invalid execution_mode: '{execution_mode}'. Must be 'inline' or 'subprocess'."
             )
-        print(f"[DEBUG Decorator] Using execution mode: {execution_mode}")
+        logger.debug(f"Decorator: Using execution mode: {execution_mode}")
         # --- End Execution Mode Validation ---

         # --- Populate Environment Variables ---
-        print("[DEBUG Decorator] Populating environment variables...")
+        logger.debug("Decorator: Populating environment variables...")
         # Keep: FUNCTION_NAME, PARAM_TYPE_STR, RETURN_TYPE_STR, IS_STREAM_MESSAGE, CONTENT_TYPE_NAME, MODULE_NAME
         # Add: NEBU_ENTRYPOINT_MODULE_PATH
         # Add: Included object sources (if any)
@@ -944,15 +945,15 @@ def processor(
                 calculated_module_path = ".".join(module_path_parts)
             else:
                 # Not a python file? Should not happen based on inspect.getfile
-                print(
-                    f"[DEBUG Decorator] Warning: Function source file is not a .py file: {rel_func_path}"
+                logger.warning(
+                    f"Decorator: Function source file is not a .py file: {rel_func_path}"
                 )
                 # Set calculated_module_path to None explicitly to trigger fallback later
                 calculated_module_path = None
         else:
             # Should have errored earlier if rel_func_path is None
-            print(
-                "[DEBUG Decorator] Warning: Could not determine relative function path. Falling back to func.__module__."
+            logger.warning(
+                "Decorator: Could not determine relative function path. Falling back to func.__module__."
             )
             # Set calculated_module_path to None explicitly to trigger fallback later
             calculated_module_path = None
@@ -960,10 +961,10 @@ def processor(
         # Assign final module_path using fallback if calculation failed or wasn't applicable
         if calculated_module_path is not None:
             module_path = calculated_module_path
-            print(f"[DEBUG Decorator] Using calculated module path: {module_path}")
+            logger.debug(f"Decorator: Using calculated module path: {module_path}")
         else:
             module_path = func.__module__ # Fallback
-            print(f"[DEBUG Decorator] Falling back to func.__module__: {module_path}")
+            logger.debug(f"Decorator: Falling back to func.__module__: {module_path}")

         # Basic info needed by consumer to find and run the function
         all_env.append(V1EnvVar(key="FUNCTION_NAME", value=processor_name))
@@ -972,8 +973,8 @@ def processor(
             all_env.append(
                 V1EnvVar(key="NEBU_ENTRYPOINT_MODULE_PATH", value=rel_func_path)
             )
-            print(
-                f"[DEBUG Decorator] Set NEBU_ENTRYPOINT_MODULE_PATH to: {rel_func_path}"
+            logger.debug(
+                f"Decorator: Set NEBU_ENTRYPOINT_MODULE_PATH to: {rel_func_path}"
             )
         # No else needed, handled by fallback calculation above

@@ -986,7 +987,7 @@ def processor(
                     f"init_func '{init_func_name}' must take zero parameters"
                 )
             all_env.append(V1EnvVar(key="INIT_FUNC_NAME", value=init_func_name))
-            print(f"[DEBUG Decorator] Set INIT_FUNC_NAME to: {init_func_name}")
+            logger.debug(f"Decorator: Set INIT_FUNC_NAME to: {init_func_name}")

         # Type info (still useful for deserialization/validation in consumer)
         # Adjust type strings to replace '__main__' with the calculated module path
@@ -998,8 +999,8 @@ def processor(
             param_type_str_repr = param_type_str_repr.replace(
                 "__main__.", f"{module_path}."
             )
-            print(
-                f"[DEBUG Decorator] Adjusted param type string: {param_type_str_repr}"
+            logger.debug(
+                f"Decorator: Adjusted param type string: {param_type_str_repr}"
             )

         all_env.append(V1EnvVar(key="PARAM_TYPE_STR", value=param_type_str_repr))
@@ -1009,8 +1010,8 @@ def processor(
             return_type_str_repr = return_type_str_repr.replace(
                 "__main__.", f"{module_path}."
             )
-            print(
-                f"[DEBUG Decorator] Adjusted return type string: {return_type_str_repr}"
+            logger.debug(
+                f"Decorator: Adjusted return type string: {return_type_str_repr}"
             )

         all_env.append(V1EnvVar(key="RETURN_TYPE_STR", value=return_type_str_repr))
@@ -1020,19 +1021,19 @@ def processor(
         content_type_name_to_set = None
         if content_type and isinstance(content_type, type):
             content_type_name_to_set = content_type.__name__
-            print(
-                f"[DEBUG Decorator] Using content type name from resolved type object: {content_type_name_to_set}"
+            logger.debug(
+                f"Decorator: Using content type name from resolved type object: {content_type_name_to_set}"
             )
         elif content_type_name_from_regex:
             content_type_name_to_set = content_type_name_from_regex
-            print(
-                f"[DEBUG Decorator] Using content type name from regex fallback: {content_type_name_to_set}"
+            logger.debug(
+                f"Decorator: Using content type name from regex fallback: {content_type_name_to_set}"
             )
         else:
             # Only warn if it was supposed to be a Message type
             if is_stream_message:
-                print(
-                    f"Warning: Could not determine CONTENT_TYPE_NAME for Message parameter {param_name} ({param_type_str_repr}). Consumer might use raw content."
+                logger.warning(
+                    f"Could not determine CONTENT_TYPE_NAME for Message parameter {param_name} ({param_type_str_repr}). Consumer might use raw content."
                 )

         if content_type_name_to_set:
@@ -1046,11 +1047,33 @@ def processor(
                 key="MODULE_NAME", value=module_path
             ) # module_path is guaranteed to be a string here (calculated or fallback)
         )
-        print(f"[DEBUG Decorator] Set MODULE_NAME to: {module_path}")
+        logger.debug(f"Decorator: Set MODULE_NAME to: {module_path}")

         # Add Execution Mode
         all_env.append(V1EnvVar(key="NEBU_EXECUTION_MODE", value=execution_mode))
-        print(f"[DEBUG Decorator] Set NEBU_EXECUTION_MODE to: {execution_mode}")
+        logger.debug(f"Decorator: Set NEBU_EXECUTION_MODE to: {execution_mode}")
+
+        # Add Hot Reload Configuration
+        if not hot_reload:
+            all_env.append(V1EnvVar(key="NEBU_DISABLE_HOT_RELOAD", value="1"))
+            logger.debug(
+                "Decorator: Set NEBU_DISABLE_HOT_RELOAD to: 1 (Hot reload disabled)"
+            )
+        else:
+            # Ensure it's explicitly '0' or unset if enabled (consumer defaults to enabled if var missing)
+            # Setting to "0" might be clearer than removing it if it was added by other means.
+            # Check if it exists and update, otherwise add "0"
+            existing_hot_reload_var = next(
+                (var for var in all_env if var.key == "NEBU_DISABLE_HOT_RELOAD"), None
+            )
+            if existing_hot_reload_var:
+                existing_hot_reload_var.value = "0"
+            else:
+                # Not strictly needed as consumer defaults to enabled, but explicit is good.
+                all_env.append(V1EnvVar(key="NEBU_DISABLE_HOT_RELOAD", value="0"))
+            logger.debug(
+                "Decorator: Hot reload enabled (NEBU_DISABLE_HOT_RELOAD=0 or unset)"
+            )

         # Add PYTHONPATH
         pythonpath_value = CONTAINER_CODE_DIR
@@ -1068,15 +1091,15 @@ def processor(
             existing_pythonpath.value = pythonpath_value
         else:
             all_env.append(V1EnvVar(key="PYTHONPATH", value=pythonpath_value))
-        print(f"[DEBUG Decorator] Ensured PYTHONPATH includes: {pythonpath_value}")
+        logger.debug(f"Decorator: Ensured PYTHONPATH includes: {pythonpath_value}")

-        print("[DEBUG Decorator] Finished populating environment variables.")
+        logger.debug("Decorator: Finished populating environment variables.")
         # --- End Environment Variables ---

         # --- Add S3 Sync Volume ---
         if s3_destination_uri:
-            print(
-                f"[DEBUG Decorator] Adding volume to sync S3 code from {s3_destination_uri} to {CONTAINER_CODE_DIR}"
+            logger.debug(
+                f"Decorator: Adding volume to sync S3 code from {s3_destination_uri} to {CONTAINER_CODE_DIR}"
             )
             s3_sync_volume = V1VolumePath(
                 source=s3_destination_uri,
@@ -1091,8 +1114,8 @@ def processor(
             ):
                 all_volumes.append(s3_sync_volume)
             else:
-                print(
-                    f"[DEBUG Decorator] Volume for {s3_destination_uri} to {CONTAINER_CODE_DIR} already exists."
+                logger.debug(
+                    f"Decorator: Volume for {s3_destination_uri} to {CONTAINER_CODE_DIR} already exists."
                 )
         else:
             # Should have errored earlier if S3 upload failed
@@ -1102,7 +1125,7 @@ def processor(
         # --- End S3 Sync Volume ---

         # --- Final Setup ---
-        print("[DEBUG Decorator] Preparing final Processor object...")
+        logger.debug("Decorator: Preparing final Processor object...")
         metadata = V1ResourceMetaRequest(
             name=processor_name, namespace=effective_namespace, labels=labels
         )
@@ -1122,16 +1145,19 @@ def processor(
         setup_commands_list = [base_deps_install]

         if setup_script:
-            print("[DEBUG Decorator] Adding user setup script to setup commands.")
+            logger.debug("Decorator: Adding user setup script to setup commands.")
             setup_commands_list.append(setup_script.strip())

+        if debug:
+            all_env.append(V1EnvVar(key="PYTHON_LOG", value="DEBUG"))
+
         # Combine setup commands and the final execution command
         all_commands = setup_commands_list + [consumer_execution_command]
         # Use newline separator for clarity in logs and script execution
         final_command = "\n".join(all_commands)

-        print(
-            f"[DEBUG Decorator] Final container command:\n-------\n{final_command}\n-------"
+        logger.debug(
+            f"Decorator: Final container command:\n-------\n{final_command}\n-------"
         )

         container_request = V1ContainerRequest(
@@ -1154,16 +1180,16 @@ def processor(
             proxy_port=proxy_port,
             health_check=health_check,
         )
-        print("[DEBUG Decorator] Final Container Request Env Vars (Summary):")
+        logger.debug("Decorator: Final Container Request Env Vars (Summary):")
         for env_var in all_env:
             # Avoid printing potentially large included source code
             value_str = env_var.value or ""
             if "SOURCE" in env_var.key and len(value_str) > 100:
-                print(
-                    f"[DEBUG Decorator] {env_var.key}: <source code present, length={len(value_str)}>"
+                logger.debug(
+                    f" {env_var.key}: <source code present, length={len(value_str)}>"
                 )
             else:
-                print(f"[DEBUG Decorator] {env_var.key}: {value_str}")
+                logger.debug(f" {env_var.key}: {value_str}")

         processor_instance = Processor(
             name=processor_name,
@@ -1177,19 +1203,19 @@ def processor(
             scale_config=scale,
             no_delete=no_delete,
         )
-        print(
-            f"[DEBUG Decorator] Processor instance '{processor_name}' created successfully."
+        logger.debug(
+            f"Decorator: Processor instance '{processor_name}' created successfully."
         )
         # Store original func for potential local invocation/testing? Keep for now.
         # TODO: Add original_func to Processor model definition if this is desired
-        # setattr(processor_instance, 'original_func', func) # Use setattr if not in model
-        try:
-            # This will fail if Processor hasn't been updated to include this field
-            processor_instance.original_func = func # type: ignore
-        except AttributeError:
-            print(
-                "Warning: Could not assign original_func to Processor instance. Update Processor model or remove assignment."
-            )
+        # Commenting out as Processor model does not have this field
+        # try:
+        #     # This will fail if Processor hasn't been updated to include this field
+        #     processor_instance.original_func = func # type: ignore
+        # except AttributeError:
+        #     logger.warning(
+        #         "Could not assign original_func to Processor instance. Update Processor model or remove assignment."
+        #     )

         return processor_instance
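Beyond the wholesale migration from print statements to the new nebu.logging logger, the user-visible change in this release pair is the pair of new keyword arguments on @processor: hot_reload (default True, exported to the consumer as NEBU_DISABLE_HOT_RELOAD) and debug (default False, which injects PYTHON_LOG=DEBUG into the container environment). The sketch below is a minimal, hypothetical usage example, not code from the package: the import paths follow the modules referenced in the diff, while the image argument, the model classes, and the message payload attribute are illustrative assumptions.

    # Hypothetical usage sketch for the new 0.1.93 decorator flags.
    from pydantic import BaseModel

    from nebu.processors.decorate import processor  # import path assumed from the diff
    from nebu.processors.models import Message


    class Input(BaseModel):
        text: str


    class Output(BaseModel):
        text: str


    @processor(
        image="python:3.11-slim",  # assumed argument; not shown in this diff
        hot_reload=False,  # new in 0.1.93: sets NEBU_DISABLE_HOT_RELOAD=1 for the consumer
        debug=True,  # new in 0.1.93: sets PYTHON_LOG=DEBUG in the container env
    )
    def echo(message: Message[Input]) -> Output:
        # The payload attribute name is an assumption for illustration.
        return Output(text=message.content.text)

Note the asymmetry in how the two flags are wired: hot_reload always writes an explicit NEBU_DISABLE_HOT_RELOAD value ("1" or "0", updating any pre-existing entry), whereas debug only appends PYTHON_LOG=DEBUG when enabled and otherwise leaves the variable unset.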