nebu 0.1.12__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/__init__.py CHANGED
@@ -7,3 +7,6 @@ from .config import *
  from .containers.container import Container
  from .containers.models import *
  from .meta import *
+ from .processors.decorate import *
+ from .processors.models import *
+ from .processors.processor import *
nebu/processors/consumer.py CHANGED
@@ -94,6 +94,28 @@ try:
          local_namespace,
      )
      exec("T = TypeVar('T')", local_namespace)
+     exec("from nebu.processors.models import *", local_namespace)
+     exec("from nebu.processors.processor import *", local_namespace)
+
+     # Execute included object sources FIRST, as they might define types needed by others
+     print("[Consumer] Executing included object sources...")
+     for i, (obj_source, args_sources) in enumerate(included_object_sources):
+         try:
+             exec(obj_source, local_namespace)
+             print(f"[Consumer] Successfully executed included object {i} base source")
+             for j, arg_source in enumerate(args_sources):
+                 try:
+                     exec(arg_source, local_namespace)
+                     print(
+                         f"[Consumer] Successfully executed included object {i} arg {j} source"
+                     )
+                 except Exception as e:
+                     print(f"Error executing included object {i} arg {j} source: {e}")
+                     traceback.print_exc()
+         except Exception as e:
+             print(f"Error executing included object {i} base source: {e}")
+             traceback.print_exc()
+     print("[Consumer] Finished executing included object sources.")

      # First try to import the module to get any needed dependencies
      # This is a fallback in case the module is available
@@ -174,22 +196,6 @@ try:
          print(f"Error defining output model: {e}")
          traceback.print_exc()

-     # Execute included object sources
-     for i, (obj_source, args_sources) in enumerate(included_object_sources):
-         try:
-             exec(obj_source, local_namespace)
-             print(f"Successfully executed included object {i} base source")
-             for j, arg_source in enumerate(args_sources):
-                 try:
-                     exec(arg_source, local_namespace)
-                     print(f"Successfully executed included object {i} arg {j} source")
-                 except Exception as e:
-                     print(f"Error executing included object {i} arg {j} source: {e}")
-                     traceback.print_exc()
-         except Exception as e:
-             print(f"Error executing included object {i} base source: {e}")
-             traceback.print_exc()
-
      # Finally, execute the function code
      try:
          exec(function_source, local_namespace)
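For orientation, here is a minimal sketch (not taken from the wheel) of the consumer-side pattern these two hunks rearrange: source strings supplied by the decorator through environment variables such as FUNCTION_SOURCE, FUNCTION_NAME, and INCLUDED_OBJECT_{i}_SOURCE are executed into a shared namespace, with included object sources now run before the function source so any types they define already exist. The direct environment-variable reads and the lookup at the end are simplifying assumptions, not the actual consumer implementation.

```python
import os
import traceback

local_namespace: dict = {}

# Hypothetical, simplified flow: run included object sources first so the
# models they define are available when the function source executes.
for key, obj_source in sorted(os.environ.items()):
    if key.startswith("INCLUDED_OBJECT_") and key.endswith("_SOURCE"):
        try:
            exec(obj_source, local_namespace)
        except Exception as e:
            print(f"Error executing {key}: {e}")
            traceback.print_exc()

# Then execute the function source and look the handler up by name.
function_source = os.environ.get("FUNCTION_SOURCE", "")
exec(function_source, local_namespace)
handler = local_namespace.get(os.environ.get("FUNCTION_NAME", ""))
```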
nebu/processors/decorate.py CHANGED
@@ -1,6 +1,17 @@
  import inspect
+ import re  # Import re for fallback check
  import textwrap
- from typing import Any, Callable, Dict, List, Optional, TypeVar, get_type_hints
+ from typing import (
+     Any,
+     Callable,
+     Dict,
+     List,
+     Optional,
+     TypeVar,
+     get_args,
+     get_origin,
+     get_type_hints,
+ )

  from pydantic import BaseModel

@@ -31,6 +42,7 @@ def get_model_source(model_class: Any) -> Optional[str]:
          source = inspect.getsource(model_class)
          return textwrap.dedent(source)
      except (IOError, TypeError):
+         print(f"[DEBUG get_model_source] Failed for: {model_class}")  # Added debug
          return None


@@ -41,19 +53,28 @@ def get_type_source(type_obj: Any) -> Optional[Any]:
          return get_model_source(type_obj)

      # If it's a GenericAlias (like V1StreamMessage[SomeType])
-     if hasattr(type_obj, "__origin__") and hasattr(type_obj, "__args__"):
-         origin_source = get_model_source(type_obj.__origin__)
+     # Use get_origin and get_args for robustness
+     origin = get_origin(type_obj)
+     args = get_args(type_obj)
+
+     if origin is not None:
+         origin_source = get_model_source(origin)
          args_sources = []

          # Get sources for all type arguments
-         for arg in type_obj.__args__:
+         for arg in args:
              arg_source = get_type_source(arg)
              if arg_source:
                  args_sources.append(arg_source)

-         return origin_source, args_sources
+         # Return tuple only if origin source and some arg sources were found
+         if origin_source or args_sources:
+             return (
+                 origin_source,
+                 args_sources,
+             )  # Return even if origin_source is None if args_sources exist

-     return None
+     return None  # Fallback if not a class or recognizable generic alias


  def processor(
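The switch from reading `__origin__`/`__args__` attributes to `typing.get_origin`/`typing.get_args` is the core of the hunk above. A small standalone illustration of how those standard-library helpers behave (plain generic types here, not V1StreamMessage, whose parameterized form is exactly the case the decorator's string-based fallback below also covers):

```python
from typing import Generic, List, TypeVar, get_args, get_origin

T = TypeVar("T")


class Box(Generic[T]):
    """A plain generic class used only for illustration."""


# For parameterized generics, get_origin/get_args recover the pieces:
assert get_origin(Box[int]) is Box
assert get_args(Box[int]) == (int,)
assert get_origin(List[str]) is list
assert get_args(List[str]) == (str,)

# For ordinary (non-generic) classes they return None / (), which is what
# get_type_source now uses to fall through to the plain-class path.
assert get_origin(int) is None
assert get_args(int) == ()
```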
@@ -98,117 +119,215 @@ def processor(
          include: Optional list of Python objects whose source code should be included
      """

-     def decorator(func: Callable[[T], R]) -> Processor:
-         # Validate that the function takes a single parameter that is a BaseModel
+     def decorator(
+         func: Callable[[Any], Any],
+     ) -> Processor:  # Changed T/R to Any for broader compatibility
+         # Prepare environment variables early
+         all_env = env or []
+
+         # --- Process Included Objects First ---
+         included_sources: Dict[Any, Any] = {}  # Store source keyed by the object itself
+         if include:
+             print(f"[DEBUG Decorator] Processing included objects: {include}")
+             for i, obj in enumerate(include):
+                 # Directly use get_model_source as include expects types/classes usually
+                 obj_source = get_model_source(obj)
+                 if obj_source:
+                     print(f"[DEBUG Decorator] Found source for included object: {obj}")
+                     included_sources[obj] = obj_source  # Store source by object
+                     # Add to env vars immediately (simplifies later logic)
+                     env_key = f"INCLUDED_OBJECT_{i}_SOURCE"
+                     all_env.append(V1EnvVar(key=env_key, value=obj_source))
+                     print(f"[DEBUG Decorator] Added {env_key} for {obj}")
+
+                 else:
+                     # Optionally raise an error or log a warning if source can't be found
+                     print(
+                         f"Warning: Could not retrieve source via get_model_source for included object: {obj}. Decorator might fail if this type is needed but cannot be auto-detected."
+                     )
+         print(
+             f"[DEBUG Decorator] Finished processing included objects. Sources found: {len(included_sources)}"
+         )
+         # --- End Included Objects Processing ---
+
+         # Validate function signature
          sig = inspect.signature(func)
          params = list(sig.parameters.values())

          if len(params) != 1:
              raise TypeError(f"Function {func.__name__} must take exactly one parameter")

-         # Check parameter type
-         type_hints = get_type_hints(func)
+         # Check parameter type hint
+         try:
+             # Use eval_str=True for forward references if needed, requires Python 3.10+ globals/locals
+             type_hints = get_type_hints(
+                 func, globalns=func.__globals__, localns=None
+             )  # Pass globals
+         except Exception as e:
+             print(
+                 f"[DEBUG Decorator] Error getting type hints for {func.__name__}: {e}"
+             )
+             raise TypeError(
+                 f"Could not evaluate type hints for {func.__name__}. Ensure all types are defined or imported."
+             ) from e
+
          param_name = params[0].name
          if param_name not in type_hints:
              raise TypeError(
                  f"Parameter {param_name} in function {func.__name__} must have a type annotation"
              )
-
          param_type = type_hints[param_name]

-         # Check if input type is V1StreamMessage or a subclass
+         # --- Determine Input Type, Content Type, and is_stream_message ---
+         print(f"[DEBUG Decorator] Full type_hints: {type_hints}")
+         print(f"[DEBUG Decorator] Detected param_type: {param_type}")
+         origin = get_origin(param_type)
+         args = get_args(param_type)
+         print(f"[DEBUG Decorator] Param type origin (using get_origin): {origin}")
+         print(f"[DEBUG Decorator] Param type args (using get_args): {args}")
+         if origin:
+             print(
+                 f"[DEBUG Decorator] Origin name: {getattr(origin, '__name__', 'N/A')}, module: {getattr(origin, '__module__', 'N/A')}"
+             )
+         print(
+             f"[DEBUG Decorator] V1StreamMessage name: {V1StreamMessage.__name__}, module: {V1StreamMessage.__module__}"
+         )
+
          is_stream_message = False
          content_type = None

-         # Handle generic V1StreamMessage
+         # Check 1: Standard check using get_origin
          if (
-             hasattr(param_type, "__origin__")
-             and param_type.__origin__ == V1StreamMessage
+             origin is not None
+             and origin.__name__ == V1StreamMessage.__name__
+             and origin.__module__ == V1StreamMessage.__module__
          ):
              is_stream_message = True
-             # Extract the content type from V1StreamMessage[ContentType]
-             if hasattr(param_type, "__args__") and param_type.__args__:
-                 content_type = param_type.__args__[0]
-         # Handle direct V1StreamMessage
+             print("[DEBUG Decorator] V1StreamMessage detected via origin check.")
+             if args:
+                 content_type = args[0]
+
+         # Check 2: Fallback check using string representation
+         elif origin is None:
+             type_str = str(param_type)
+             match = re.match(
+                 r"<class 'nebu\.processors\.models\.V1StreamMessage\[(.*)\]\'>",
+                 type_str,
+             )
+             if match:
+                 print(
+                     "[DEBUG Decorator] V1StreamMessage detected via string regex check (origin/args failed)."
+                 )
+                 content_type_name = match.group(1)
+                 print(
+                     f"[DEBUG Decorator] Manually parsed content_type name: {content_type_name}"
+                 )
+                 # Attempt to find the type
+                 resolved_type = None
+                 func_globals = func.__globals__
+                 if content_type_name in func_globals:
+                     resolved_type = func_globals[content_type_name]
+                     print(
+                         f"[DEBUG Decorator] Found content type '{content_type_name}' in function globals."
+                     )
+                 else:
+                     func_module = inspect.getmodule(func)
+                     if func_module and hasattr(func_module, content_type_name):
+                         resolved_type = getattr(func_module, content_type_name)
+                         print(
+                             f"[DEBUG Decorator] Found content type '{content_type_name}' in function module."
+                         )
+
+                 if resolved_type:
+                     content_type = resolved_type
+                     is_stream_message = True  # Set flag *only if* resolved
+                 else:
+                     print(
+                         f"[DEBUG Decorator] Fallback failed: Could not find type '{content_type_name}' in globals or module. Use 'include'."
+                     )
+             # else: Fallback regex did not match
+
+         # Check 3: Handle direct V1StreamMessage
          elif param_type is V1StreamMessage:
+             print("[DEBUG Decorator] V1StreamMessage detected via direct type check.")
              is_stream_message = True
+             # content_type remains None

-         # Ensure the parameter is a BaseModel
-         actual_type = (
-             param_type.__origin__ if hasattr(param_type, "__origin__") else param_type  # type: ignore
-         )
-         if not issubclass(actual_type, BaseModel):
-             raise TypeError(
-                 f"Parameter {param_name} in function {func.__name__} must be a BaseModel"
+         print(f"[DEBUG Decorator] Final is_stream_message: {is_stream_message}")
+         print(f"[DEBUG Decorator] Final content_type: {content_type}")
+         # --- End Input Type Determination ---
+
+         # --- Validate Parameter Type is BaseModel ---
+         type_to_check_for_basemodel = None
+         if is_stream_message:
+             if content_type:
+                 type_to_check_for_basemodel = content_type
+             # else: Base V1StreamMessage itself is a BaseModel, no need to check further
+         else:
+             type_to_check_for_basemodel = param_type
+
+         if type_to_check_for_basemodel:
+             actual_type_to_check = (
+                 get_origin(type_to_check_for_basemodel) or type_to_check_for_basemodel
              )
+             if not issubclass(actual_type_to_check, BaseModel):
+                 raise TypeError(
+                     f"Parameter '{param_name}' effective type ({actual_type_to_check.__name__}) in function '{func.__name__}' must be a BaseModel subclass"
+                 )
+         # --- End Parameter Validation ---

-         # Check return type
+         # --- Validate Return Type ---
          if "return" not in type_hints:
              raise TypeError(
                  f"Function {func.__name__} must have a return type annotation"
              )
-
          return_type = type_hints["return"]
-         actual_return_type = (
-             return_type.__origin__
-             if hasattr(return_type, "__origin__")
-             else return_type
-         )
+         actual_return_type = get_origin(return_type) or return_type
          if not issubclass(actual_return_type, BaseModel):
              raise TypeError(
-                 f"Return value of function {func.__name__} must be a BaseModel"
+                 f"Return value of function {func.__name__} must be a BaseModel subclass"
              )
+         # --- End Return Type Validation ---

-         # Get function name to use as processor name
+         # --- Get Function Source ---
          processor_name = func.__name__
-
-         # Prepare environment variables
-         all_env = env or []
-
-         # Get the source code of the function
          try:
              raw_function_source = inspect.getsource(func)
-             print(
-                 f"[DEBUG Decorator] Raw source for {func.__name__}:\n{raw_function_source}"
-             )
-
-             # Clean up the indentation
-             dedented_function_source = textwrap.dedent(raw_function_source)
-             print(
-                 f"[DEBUG Decorator] Dedented source for {func.__name__}:\n{dedented_function_source}"
-             )
-
-             # Find the start of the function definition ('def')
-             # Skip lines starting with '@' or empty lines until 'def' is found
-             lines = dedented_function_source.splitlines()
+             # ... (rest of source processing remains the same) ...
+             lines = raw_function_source.splitlines()
              func_def_index = -1
+             decorator_lines = 0
+             in_decorator = False
              for i, line in enumerate(lines):
                  stripped_line = line.strip()
+                 if stripped_line.startswith("@"):
+                     in_decorator = True
+                     decorator_lines += 1
+                     continue  # Skip decorator line
+                 if in_decorator and stripped_line.endswith(
+                     ")"
+                 ):  # Simple check for end of decorator args
+                     in_decorator = False
+                     decorator_lines += 1
+                     continue
+                 if in_decorator:
+                     decorator_lines += 1
+                     continue  # Skip multi-line decorator args
+
                  if stripped_line.startswith("def "):
                      func_def_index = i
                      break
-                 elif stripped_line.startswith("@") or not stripped_line:
-                     # Skip decorator lines and empty lines before 'def'
-                     continue
-                 else:
-                     # Found something unexpected before 'def'
-                     raise ValueError(
-                         f"Unexpected content found before 'def' in source for {func.__name__}. Cannot reliably strip decorators."
-                     )

              if func_def_index != -1:
                  # Keep lines from the 'def' line onwards
-                 function_source = "\n".join(
-                     lines[func_def_index:]
-                 )  # Use \n for env var
+                 function_source = "\n".join(lines[func_def_index:])
              else:
-                 # If 'def' wasn't found (shouldn't happen with valid function source)
                  raise ValueError(
                      f"Could not find function definition 'def' in source for {func.__name__}"
                  )

              print(
-                 f"[DEBUG Decorator] Processed function source for {func.__name__}:\n{function_source}"
+                 f"[DEBUG Decorator] Processed function source for {func.__name__}:\n{function_source[:200]}..."
              )

          except (IOError, TypeError) as e:
@@ -216,161 +335,135 @@ def processor(
              raise ValueError(
                  f"Could not retrieve source code for function {func.__name__}: {e}"
              ) from e
+         # --- End Function Source ---

-         # Get source code for the models
+         # --- Get Model Sources (Prioritizing Included) ---
          input_model_source = None
          output_model_source = None
          content_type_source = None
-
-         # Get the V1StreamMessage class source
-         stream_message_source = get_model_source(V1StreamMessage)
-
-         # Get input model source
-         if is_stream_message:
-             input_model_source = stream_message_source
-             if content_type:
-                 content_type_source = get_type_source(content_type)
-         else:
-             input_model_source = get_type_source(param_type)
-
-         # Get output model source
-         output_model_source = get_type_source(return_type)
-
-         # Add function source code to environment variables
-         print(
-             f"[DEBUG Decorator] Setting FUNCTION_SOURCE: {function_source[:100]}..."
-         )  # Print first 100 chars
-         all_env.append(V1EnvVar(key="FUNCTION_SOURCE", value=function_source))
-         print(f"[DEBUG Decorator] Setting FUNCTION_NAME: {func.__name__}")
-         all_env.append(V1EnvVar(key="FUNCTION_NAME", value=func.__name__))
-
-         # Add model source codes
-         if input_model_source:
-             if isinstance(input_model_source, tuple):
-                 all_env.append(
-                     V1EnvVar(key="INPUT_MODEL_SOURCE", value=input_model_source[0])
+         stream_message_source = get_model_source(V1StreamMessage)  # Still get this
+
+         # Get content_type source (if applicable)
+         if is_stream_message and content_type:
+             if content_type in included_sources:
+                 content_type_source = included_sources[content_type]
+                 print(
+                     f"[DEBUG Decorator] Using included source for content_type: {content_type}"
                  )
-                 # Add generic args sources
-                 for i, arg_source in enumerate(input_model_source[1]):
-                     all_env.append(
-                         V1EnvVar(key=f"INPUT_MODEL_ARG_{i}_SOURCE", value=arg_source)
-                     )
              else:
-                 all_env.append(
-                     V1EnvVar(key="INPUT_MODEL_SOURCE", value=input_model_source)
+                 print(
+                     f"[DEBUG Decorator] Attempting get_type_source for content_type: {content_type}"
                  )
-
-         if output_model_source:
-             if isinstance(output_model_source, tuple):
-                 all_env.append(
-                     V1EnvVar(key="OUTPUT_MODEL_SOURCE", value=output_model_source[0])
-                 )
-                 # Add generic args sources
-                 for i, arg_source in enumerate(output_model_source[1]):
-                     all_env.append(
-                         V1EnvVar(key=f"OUTPUT_MODEL_ARG_{i}_SOURCE", value=arg_source)
+                 content_type_source = get_type_source(content_type)
+                 if content_type_source is None:
+                     print(
+                         f"[DEBUG Decorator] Warning: get_type_source failed for content_type: {content_type}. Consumer might fail if not included."
                      )
-             else:
-                 all_env.append(
-                     V1EnvVar(key="OUTPUT_MODEL_SOURCE", value=output_model_source)
-                 )

-         if stream_message_source:
-             all_env.append(
-                 V1EnvVar(key="STREAM_MESSAGE_SOURCE", value=stream_message_source)
+             print(
+                 f"[DEBUG Decorator] Final content_type_source: {str(content_type_source)[:100]}..."
              )

-         if content_type_source:
-             if isinstance(content_type_source, tuple):
-                 all_env.append(
-                     V1EnvVar(key="CONTENT_TYPE_SOURCE", value=content_type_source[0])
+         # Get input_model source (which is V1StreamMessage if is_stream_message)
+         if is_stream_message:
+             input_model_source = (
+                 stream_message_source  # Always use base stream message source
+             )
+         elif (
+             param_type in included_sources
+         ):  # Check if non-stream-message input type was included
+             input_model_source = included_sources[param_type]
+             print(
+                 f"[DEBUG Decorator] Using included source for param_type: {param_type}"
+             )
+         else:  # Fallback for non-stream-message, non-included input type
+             print(
+                 f"[DEBUG Decorator] Attempting get_type_source for param_type: {param_type}"
+             )
+             input_model_source = get_type_source(param_type)
+             if input_model_source is None:
+                 print(
+                     f"[DEBUG Decorator] Warning: get_type_source failed for param_type: {param_type}. Consumer might fail if not included."
                  )
-                 # Add generic args sources for content type
-                 for i, arg_source in enumerate(content_type_source[1]):
-                     all_env.append(
-                         V1EnvVar(key=f"CONTENT_TYPE_ARG_{i}_SOURCE", value=arg_source)
-                     )
-             else:
-                 all_env.append(
-                     V1EnvVar(key="CONTENT_TYPE_SOURCE", value=content_type_source)
+         print(
+             f"[DEBUG Decorator] Final input_model_source: {str(input_model_source)[:100]}..."
+         )
+
+         # Get output_model source
+         if return_type in included_sources:
+             output_model_source = included_sources[return_type]
+             print(
+                 f"[DEBUG Decorator] Using included source for return_type: {return_type}"
+             )
+         else:
+             print(
+                 f"[DEBUG Decorator] Attempting get_type_source for return_type: {return_type}"
+             )
+             output_model_source = get_type_source(return_type)
+             if output_model_source is None:
+                 print(
+                     f"[DEBUG Decorator] Warning: get_type_source failed for return_type: {return_type}. Consumer might fail if not included."
                  )
+         print(
+             f"[DEBUG Decorator] Final output_model_source: {str(output_model_source)[:100]}..."
+         )
+         # --- End Model Sources ---

-         # Add included object sources
-         if include:
-             for i, obj in enumerate(include):
-                 obj_source = get_type_source(
-                     obj
-                 )  # Reuse existing function for source retrieval
-                 if obj_source:
-                     if isinstance(obj_source, tuple):
-                         # Handle complex types (like generics) if needed, similar to models
+         # --- Populate Environment Variables ---
+         print("[DEBUG Decorator] Populating environment variables...")
+         all_env.append(V1EnvVar(key="FUNCTION_SOURCE", value=function_source))
+         all_env.append(V1EnvVar(key="FUNCTION_NAME", value=func.__name__))
+
+         # Add model source codes (handle tuples from get_type_source if necessary, although unlikely with prioritization)
+         def add_source_to_env(key_base: str, source: Any):
+             if source:
+                 if isinstance(source, tuple):
+                     # This path is less likely now with include prioritization
+                     if source[0]:  # Origin source
                          all_env.append(
-                             V1EnvVar(
-                                 key=f"INCLUDED_OBJECT_{i}_SOURCE", value=obj_source[0]
-                             )
+                             V1EnvVar(key=f"{key_base}_SOURCE", value=source[0])
                          )
-                         for j, arg_source in enumerate(obj_source[1]):
-                             all_env.append(
-                                 V1EnvVar(
-                                     key=f"INCLUDED_OBJECT_{i}_ARG_{j}_SOURCE",
-                                     value=arg_source,
-                                 )
-                             )
-                     else:
+                     for i, arg_source in enumerate(source[1]):  # Arg sources
                          all_env.append(
-                             V1EnvVar(
-                                 key=f"INCLUDED_OBJECT_{i}_SOURCE", value=obj_source
-                             )
+                             V1EnvVar(key=f"{key_base}_ARG_{i}_SOURCE", value=arg_source)
                          )
-                 else:
-                     # Optionally raise an error or log a warning if source can't be found
-                     print(
-                         f"Warning: Could not retrieve source for included object: {obj}"
-                     )
+                 else:  # Simple string source
+                     all_env.append(V1EnvVar(key=f"{key_base}_SOURCE", value=source))
+
+         add_source_to_env("INPUT_MODEL", input_model_source)
+         add_source_to_env("OUTPUT_MODEL", output_model_source)
+         add_source_to_env("CONTENT_TYPE", content_type_source)
+         add_source_to_env(
+             "STREAM_MESSAGE", stream_message_source
+         )  # Add base stream message source

-         # Add parameter and return type info for runtime validation
+         # Type names for consumer validation/parsing
          all_env.append(
              V1EnvVar(
-                 key="PARAM_TYPE_NAME",
-                 value=param_type.__name__
-                 if hasattr(param_type, "__name__")
-                 else str(param_type),
-             )
+                 key="PARAM_TYPE_STR", value=str(param_type)
+             )  # Send string representation
          )
          all_env.append(
              V1EnvVar(
-                 key="RETURN_TYPE_NAME",
-                 value=return_type.__name__
-                 if hasattr(return_type, "__name__")
-                 else str(return_type),
-             )
+                 key="RETURN_TYPE_STR", value=str(return_type)
+             )  # Send string representation
          )
          all_env.append(V1EnvVar(key="IS_STREAM_MESSAGE", value=str(is_stream_message)))
-
          if content_type:
              all_env.append(
-                 V1EnvVar(
-                     key="CONTENT_TYPE_NAME",
-                     value=content_type.__name__
-                     if hasattr(content_type, "__name__")
-                     else str(content_type),
-                 )
+                 V1EnvVar(key="CONTENT_TYPE_NAME", value=content_type.__name__)
              )

-         # We still add the module for reference, but we won't rely on importing it
          all_env.append(V1EnvVar(key="MODULE_NAME", value=func.__module__))
+         # --- End Environment Variables ---

-         # Prepare metadata
+         # --- Final Setup ---
          metadata = V1ResourceMetaRequest(
              name=processor_name, namespace=namespace, labels=labels
          )
-
-         # Create the command to run the consumer directly
          consumer_command = f"{python_cmd} -m nebu.processors.consumer"
+         final_command = f"{python_cmd} -m pip install redis nebu\n\n{setup_script or ''}\n\n{consumer_command}"

-         final_command = f"{python_cmd} -m pip install redis nebu\n\n{setup_script}\n\n{consumer_command}"
-
-         # Create the V1ContainerRequest
          container_request = V1ContainerRequest(
              image=image,
              command=final_command,
@@ -384,16 +477,19 @@ def processor(
              platform=platform,
              metadata=metadata,
          )
-         print("container_request", container_request)
+         print("[DEBUG Decorator] Final Container Request Env Vars:")
+         for env_var in all_env:
+             print(
+                 f"[DEBUG Decorator] {env_var.key}: {str(env_var.value)[:70]}..."
+             )  # Print key and start of value

-         # Create the processor instance
          processor_instance = Processor(
              name=processor_name,
-             stream=processor_name,
+             stream=processor_name,  # Default stream name to processor name
              namespace=namespace,
              labels=labels,
              container=container_request,
-             schema_=None,  # TODO
+             schema_=None,
              common_schema=None,
              min_replicas=min_replicas,
              max_replicas=max_replicas,
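For context on how the decorated path above is exercised, here is a hedged usage sketch (not part of the diff): the handler must take exactly one annotated Pydantic BaseModel parameter and return a BaseModel, and model types the consumer cannot recover on its own can be passed via `include`. The model names and image tag are hypothetical; the exact decorator keyword set beyond `image` and `include` is assumed from the code shown above.

```python
from pydantic import BaseModel

from nebu.processors.decorate import processor


class MyInput(BaseModel):  # hypothetical example model
    text: str


class MyOutput(BaseModel):  # hypothetical example model
    length: int


@processor(image="python:3.11-slim", include=[MyInput, MyOutput])
def count_chars(data: MyInput) -> MyOutput:
    # The decorator captures this function's source (and the included model
    # sources) and ships them to the consumer via container environment variables.
    return MyOutput(length=len(data.text))
```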
nebu-0.1.16.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nebu
- Version: 0.1.12
+ Version: 0.1.16
  Summary: A globally distributed container runtime
  Requires-Python: >=3.10.14
  Description-Content-Type: text/markdown
nebu-0.1.16.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- nebu/__init__.py,sha256=EbdC8ZKnRTt6jkX0WN0p1pnaDEzb2InqZ1r8QZWzph0,195
+ nebu/__init__.py,sha256=Frz4LWVslmGeEE_ZjCIV5riSw8RC4ZJ-gb4iQPKCOSM,299
  nebu/auth.py,sha256=rApCd-7_c3GpIb7gjCB79rR7SOcmkG7MmaTE6zMbvr0,1125
  nebu/config.py,sha256=XBY7uKgcJX9d1HGxqqpx87o_9DuF3maUlUnKkcpUrKU,4565
  nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
@@ -6,15 +6,15 @@ nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,
  nebu/containers/decorator.py,sha256=qiM7hbHne9MhSp1gDgX5z5bimsXr_YPjTIZoe09dwr4,2741
  nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
  nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
- nebu/processors/consumer.py,sha256=rFqd6gg2OYgXi3gf11GFpuaOOzuK1TYaPO-t_leSR8Y,15097
- nebu/processors/decorate.py,sha256=m5zDs7wxHLS0drgTwXUQTiUEFml1qoYDuVY8VMsgHQs,15218
+ nebu/processors/consumer.py,sha256=LsMfh-Ai1goxolQiw913Tj9bYLR0Ji3OXp3LGFFoiQ8,15480
+ nebu/processors/decorate.py,sha256=NwuV0uY1weAM472VntfyYKd0tTsqtFCPrNGhF4vJP6I,20492
  nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
  nebu/processors/models.py,sha256=GvnI8UJrQSjHo2snP07cPfisCH90cEGTY-PZV5_AtXI,3654
  nebu/processors/processor.py,sha256=oy2YdI-cy6qQWxrZhpZahJV46oWZlu_Im-jm811R_oo,9667
  nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
  nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nebu-0.1.12.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- nebu-0.1.12.dist-info/METADATA,sha256=QEN1FBqo3AsqbtLdcOnu7wqiYf6Las8cI1dRrL8V7oY,1588
- nebu-0.1.12.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- nebu-0.1.12.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
- nebu-0.1.12.dist-info/RECORD,,
+ nebu-0.1.16.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ nebu-0.1.16.dist-info/METADATA,sha256=TN23ux4MnSCtVx4I23hiai9S-ROhFmza8-qCAw0f9Pw,1588
+ nebu-0.1.16.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ nebu-0.1.16.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+ nebu-0.1.16.dist-info/RECORD,,