nebu 0.1.14__py3-none-any.whl → 0.1.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nebu/containers/decorator.py +11 -3
- nebu/processors/consumer.py +36 -19
- nebu/processors/decorate.py +664 -242
- {nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/METADATA +4 -1
- {nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/RECORD +8 -8
- {nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/WHEEL +0 -0
- {nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/licenses/LICENSE +0 -0
- {nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/top_level.txt +0 -0
nebu/containers/decorator.py
CHANGED
@@ -1,9 +1,9 @@
 import base64
-import inspect
 import pickle
 import time
 from typing import Any, Callable, List, Optional

+import dill  # Import dill
 import requests

 from nebu.containers.container import Container
@@ -47,8 +47,16 @@ def container(
     )
     time.sleep(1)

-    # Get function source code
-
+    # Get function source code using dill
+    try:
+        func_code = dill.source.getsource(func)
+    except (OSError, TypeError) as e:
+        raise RuntimeError(
+            f"Failed to retrieve source code for function '{func.__name__}'. "
+            "This can happen with functions defined dynamically or interactively "
+            "(e.g., in a Jupyter notebook or REPL). Ensure the function is defined "
+            f"in a standard Python module if possible. Original error: {e}"
+        )

     # Serialize arguments using pickle for complex objects
     serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
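The decorator now captures function source with dill instead of inspect and turns lookup failures into an actionable error. Below is a minimal standalone sketch of that pattern, not the package's own code; the helper and example function names are illustrative.

```python
# Sketch: capture a function's source with dill, or fail with a clear message.
import dill


def get_source_or_explain(func):
    """Return the source text of `func`, or raise a descriptive RuntimeError."""
    try:
        return dill.source.getsource(func)
    except (OSError, TypeError) as e:
        raise RuntimeError(
            f"Could not retrieve source for '{func.__name__}'. Functions defined "
            "interactively (e.g., in a REPL) or generated dynamically may have no "
            f"source file to read. Original error: {e}"
        ) from e


def example(x: int) -> int:
    return x + 1


if __name__ == "__main__":
    print(get_source_or_explain(example))
```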
nebu/processors/consumer.py
CHANGED
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 import json
 import os
+import socket
 import sys
 import time
 import traceback
@@ -8,6 +9,7 @@ from datetime import datetime
 from typing import Dict, TypeVar

 import redis
+import socks

 # Define TypeVar for generic models
 T = TypeVar("T")
@@ -97,6 +99,26 @@ try:
     exec("from nebu.processors.models import *", local_namespace)
     exec("from nebu.processors.processor import *", local_namespace)

+    # Execute included object sources FIRST, as they might define types needed by others
+    print("[Consumer] Executing included object sources...")
+    for i, (obj_source, args_sources) in enumerate(included_object_sources):
+        try:
+            exec(obj_source, local_namespace)
+            print(f"[Consumer] Successfully executed included object {i} base source")
+            for j, arg_source in enumerate(args_sources):
+                try:
+                    exec(arg_source, local_namespace)
+                    print(
+                        f"[Consumer] Successfully executed included object {i} arg {j} source"
+                    )
+                except Exception as e:
+                    print(f"Error executing included object {i} arg {j} source: {e}")
+                    traceback.print_exc()
+        except Exception as e:
+            print(f"Error executing included object {i} base source: {e}")
+            traceback.print_exc()
+    print("[Consumer] Finished executing included object sources.")
+
     # First try to import the module to get any needed dependencies
     # This is a fallback in case the module is available
     module_name = os.environ.get("MODULE_NAME")
@@ -176,22 +198,6 @@ try:
     print(f"Error defining output model: {e}")
     traceback.print_exc()

-    # Execute included object sources
-    for i, (obj_source, args_sources) in enumerate(included_object_sources):
-        try:
-            exec(obj_source, local_namespace)
-            print(f"Successfully executed included object {i} base source")
-            for j, arg_source in enumerate(args_sources):
-                try:
-                    exec(arg_source, local_namespace)
-                    print(f"Successfully executed included object {i} arg {j} source")
-                except Exception as e:
-                    print(f"Error executing included object {i} arg {j} source: {e}")
-                    traceback.print_exc()
-        except Exception as e:
-            print(f"Error executing included object {i} base source: {e}")
-            traceback.print_exc()
-
     # Finally, execute the function code
     try:
         exec(function_source, local_namespace)
@@ -216,13 +222,24 @@ if not all([REDIS_URL, REDIS_CONSUMER_GROUP, REDIS_STREAM]):
     print("Missing required Redis environment variables")
     sys.exit(1)

+# Configure SOCKS proxy before connecting to Redis
+# Use the proxy settings provided by tailscaled
+socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
+socket.socket = socks.socksocket
+print("Configured SOCKS5 proxy for socket connections via localhost:1055")
+
 # Connect to Redis
 try:
-
+    # Parse the Redis URL to handle potential credentials or specific DBs if needed
+    # Although from_url should work now with the patched socket
+    r = redis.from_url(
+        REDIS_URL, decode_responses=True
+    )  # Added decode_responses for convenience
+    r.ping()  # Test connection
     redis_info = REDIS_URL.split("@")[-1] if "@" in REDIS_URL else REDIS_URL
-    print(f"Connected to Redis at {redis_info}")
+    print(f"Connected to Redis via SOCKS proxy at {redis_info}")
 except Exception as e:
-    print(f"Failed to connect to Redis: {e}")
+    print(f"Failed to connect to Redis via SOCKS proxy: {e}")
     traceback.print_exc()
     sys.exit(1)

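The consumer now forces every outbound socket through a SOCKS5 proxy before talking to Redis, which is how it reaches Redis over tailscaled's userspace proxy on localhost:1055. Below is a minimal sketch of the same setup, assuming a SOCKS5 proxy listening on localhost:1055 and a `REDIS_URL` environment variable; it is not the consumer itself.

```python
# Sketch: route redis-py (and any other socket client) through a SOCKS5 proxy
# by swapping the default socket class for PySocks' socksocket.
import os
import socket

import redis
import socks

# All sockets created after this point tunnel through the proxy.
socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
socket.socket = socks.socksocket

url = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
r = redis.from_url(url, decode_responses=True)
r.ping()  # raises if the proxy or Redis is unreachable
print("Connected to Redis through the SOCKS5 proxy")
```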
nebu/processors/decorate.py
CHANGED
@@ -1,7 +1,20 @@
+import ast  # For parsing notebook code
 import inspect
+import re  # Import re for fallback check
 import textwrap
-from typing import
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    List,
+    Optional,
+    TypeVar,
+    get_args,
+    get_origin,
+    get_type_hints,
+)

+import dill  # Add dill import
 from pydantic import BaseModel

 from nebu.containers.models import (
@@ -24,35 +37,300 @@ from .default import DEFAULT_MAX_REPLICAS, DEFAULT_MIN_REPLICAS, DEFAULT_SCALE
 T = TypeVar("T", bound=BaseModel)
 R = TypeVar("R", bound=BaseModel)

+# Attribute name for explicitly stored source
+_NEBU_EXPLICIT_SOURCE_ATTR = "_nebu_explicit_source"
+
+# --- Jupyter Helper Functions ---
+
+
+def is_jupyter_notebook():
+    """
+    Determine if the current code is running inside a Jupyter notebook.
+    Returns bool: True if running inside a Jupyter notebook, False otherwise.
+    """
+    print("[DEBUG Helper] Checking if running in Jupyter...")
+    try:
+        import IPython
+
+        ip = IPython.get_ipython()
+        if ip is None:
+            print("[DEBUG Helper] is_jupyter_notebook: No IPython instance found.")
+            return False
+        class_name = str(ip.__class__)
+        print(f"[DEBUG Helper] is_jupyter_notebook: IPython class name: {class_name}")
+        if "ZMQInteractiveShell" in class_name:
+            print(
+                "[DEBUG Helper] is_jupyter_notebook: Jupyter detected (ZMQInteractiveShell)."
+            )
+            return True
+        print(
+            "[DEBUG Helper] is_jupyter_notebook: Not Jupyter (IPython instance found, but not ZMQInteractiveShell)."
+        )
+        return False
+    except Exception as e:
+        print(f"[DEBUG Helper] is_jupyter_notebook: Exception occurred: {e}")
+        return False
+
+
+def get_notebook_executed_code():
+    """
+    Returns all executed code from the current notebook session.
+    Returns str or None: All executed code as a string, or None if not possible.
+    """
+    print("[DEBUG Helper] Attempting to get notebook execution history...")
+    try:
+        import IPython
+
+        ip = IPython.get_ipython()
+        if ip is None or not hasattr(ip, "history_manager"):
+            print(
+                "[DEBUG Helper] get_notebook_executed_code: No IPython instance or history_manager."
+            )
+            return None
+        history_manager = ip.history_manager
+        # Limiting history range for debugging? Maybe get_tail(N)? For now, get all.
+        # history = history_manager.get_range(start=1) # type: ignore
+        history = list(history_manager.get_range(start=1))  # type: ignore # Convert to list to get length
+        print(
+            f"[DEBUG Helper] get_notebook_executed_code: Retrieved {len(history)} history entries."
+        )
+        source_code = ""
+        separator = "\n#<NEBU_CELL_SEP>#\n"
+        for _, _, content in history:  # Use _ for unused session, lineno
+            if isinstance(content, str) and content.strip():
+                source_code += content + separator
+        print(
+            f"[DEBUG Helper] get_notebook_executed_code: Total history source length: {len(source_code)}"
+        )
+        return source_code
+    except Exception as e:
+        print(f"[DEBUG Helper] get_notebook_executed_code: Error getting history: {e}")
+        return None
+
+
+def extract_definition_source_from_string(
+    source_string: str, def_name: str, def_type: type = ast.FunctionDef
+) -> Optional[str]:
+    """
+    Attempts to extract the source code of a function or class from a larger string
+    (like notebook history). Finds the *last* complete definition.
+    Uses AST parsing for robustness.
+    def_type can be ast.FunctionDef or ast.ClassDef.
+    """
+    print(
+        f"[DEBUG Helper] Extracting '{def_name}' ({def_type.__name__}) from history string (len: {len(source_string)})..."
+    )
+    if not source_string or not def_name:
+        print("[DEBUG Helper] extract: Empty source string or def_name.")
+        return None
+
+    cells = source_string.split("#<NEBU_CELL_SEP>#")
+    print(f"[DEBUG Helper] extract: Split history into {len(cells)} potential cells.")
+    last_found_source = None
+
+    for i, cell in enumerate(reversed(cells)):
+        cell_num = len(cells) - 1 - i
+        cell = cell.strip()
+        if not cell:
+            continue
+        # print(f"[DEBUG Helper] extract: Analyzing cell #{cell_num}...") # Can be very verbose
+        try:
+            tree = ast.parse(cell)
+            found_in_cell = False
+            for node in ast.walk(tree):
+                if (
+                    isinstance(node, def_type)
+                    and hasattr(node, "name")
+                    and node.name == def_name
+                ):
+                    print(
+                        f"[DEBUG Helper] extract: Found node for '{def_name}' in cell #{cell_num}."
+                    )
+                    try:
+                        # Use ast.get_source_segment for accurate extraction (Python 3.8+)
+                        func_source = ast.get_source_segment(cell, node)
+                        if func_source:
+                            print(
+                                f"[DEBUG Helper] extract: Successfully extracted source using get_source_segment for '{def_name}'."
+                            )
+                            last_found_source = func_source
+                            found_in_cell = True
+                            break  # Stop searching this cell
+                    except AttributeError:  # Fallback for Python < 3.8
+                        print(
+                            f"[DEBUG Helper] extract: get_source_segment failed (likely Py < 3.8), using fallback for '{def_name}'."
+                        )
+                        start_lineno = getattr(node, "lineno", 1) - 1
+                        end_lineno = getattr(node, "end_lineno", start_lineno + 1)
+
+                        if hasattr(node, "decorator_list") and node.decorator_list:
+                            first_decorator_start_line = (
+                                getattr(
+                                    node.decorator_list[0], "lineno", start_lineno + 1
+                                )
+                                - 1
+                            )  # type: ignore
+                            start_lineno = min(start_lineno, first_decorator_start_line)
+
+                        lines = cell.splitlines()
+                        if 0 <= start_lineno < len(lines) and end_lineno <= len(lines):
+                            extracted_lines = lines[start_lineno:end_lineno]
+                            if extracted_lines and (
+                                extracted_lines[0].strip().startswith("@")
+                                or extracted_lines[0]
+                                .strip()
+                                .startswith(("def ", "class "))
+                            ):
+                                last_found_source = "\n".join(extracted_lines)
+                                print(
+                                    f"[DEBUG Helper] extract: Extracted source via fallback for '{def_name}'."
+                                )
+                                found_in_cell = True
+                                break
+                        else:
+                            print(
+                                f"[DEBUG Helper] extract: Warning: Line numbers out of bounds for {def_name} in cell (fallback)."
+                            )
+
+            if found_in_cell:
+                print(
+                    f"[DEBUG Helper] extract: Found and returning source for '{def_name}' from cell #{cell_num}."
+                )
+                return last_found_source  # Found last definition, return immediately
+
+        except (SyntaxError, ValueError) as e:
+            # print(f"[DEBUG Helper] extract: Skipping cell #{cell_num} due to parse error: {e}") # Can be verbose
+            continue
+        except Exception as e:
+            print(
+                f"[DEBUG Helper] extract: Warning: AST processing error for {def_name} in cell #{cell_num}: {e}"
+            )
+            continue

+    if not last_found_source:
+        print(
+            f"[DEBUG Helper] extract: Definition '{def_name}' of type {def_type.__name__} not found in history search."
+        )
+    return last_found_source
+
+
+# --- End Jupyter Helper Functions ---
+
+
+def include(obj: Any) -> Any:
+    """
+    Decorator to explicitly capture the source code of a function or class,
+    intended for use in environments where inspect/dill might fail (e.g., Jupyter).
+    """
+    try:
+        source = dill.source.getsource(obj)
+        dedented_source = textwrap.dedent(source)
+        setattr(obj, _NEBU_EXPLICIT_SOURCE_ATTR, dedented_source)
+        print(
+            f"[DEBUG @include] Successfully captured source for: {getattr(obj, '__name__', str(obj))}"
+        )
+    except Exception as e:
+        # Don't fail the definition, just warn
+        print(
+            f"Warning: @include could not capture source for {getattr(obj, '__name__', str(obj))}: {e}. Automatic source retrieval will be attempted later."
+        )
+    return obj
+
+
+def get_model_source(
+    model_class: Any, notebook_code: Optional[str] = None
+) -> Optional[str]:
+    """
+    Get the source code of a model class.
+    Checks explicit source, then notebook history (if provided), then dill.
+    """
+    model_name_str = getattr(model_class, "__name__", str(model_class))
+    print(f"[DEBUG get_model_source] Getting source for: {model_name_str}")
+    # 1. Check explicit source
+    explicit_source = getattr(model_class, _NEBU_EXPLICIT_SOURCE_ATTR, None)
+    if explicit_source:
+        print(
+            f"[DEBUG get_model_source] Using explicit source (@include) for: {model_name_str}"
+        )
+        return explicit_source
+
+    # 2. Check notebook history
+    if notebook_code and hasattr(model_class, "__name__"):
+        print(
+            f"[DEBUG get_model_source] Attempting notebook history extraction for: {model_class.__name__}"
+        )
+        extracted_source = extract_definition_source_from_string(
+            notebook_code, model_class.__name__, ast.ClassDef
+        )
+        if extracted_source:
+            print(
+                f"[DEBUG get_model_source] Using notebook history source for: {model_class.__name__}"
+            )
+            return extracted_source
+        else:
+            print(
+                f"[DEBUG get_model_source] Notebook history extraction failed for: {model_class.__name__}. Proceeding to dill."
+            )

-
-    """Get the source code of a model class."""
+    # 3. Fallback to dill
     try:
-
+        print(
+            f"[DEBUG get_model_source] Attempting dill fallback for: {model_name_str}"
+        )
+        source = dill.source.getsource(model_class)
+        print(f"[DEBUG get_model_source] Using dill source for: {model_name_str}")
         return textwrap.dedent(source)
-    except (IOError, TypeError):
+    except (IOError, TypeError, OSError) as e:
+        print(
+            f"[DEBUG get_model_source] Failed dill fallback for: {model_name_str}: {e}"
+        )
         return None


-
+# Reintroduce get_type_source to handle generics
+def get_type_source(
+    type_obj: Any, notebook_code: Optional[str] = None
+) -> Optional[Any]:
     """Get the source code for a type, including generic parameters."""
-
-
-
+    type_obj_str = str(type_obj)
+    print(f"[DEBUG get_type_source] Getting source for type: {type_obj_str}")
+    origin = get_origin(type_obj)
+    args = get_args(type_obj)

-
-
-
+    if origin is not None:
+        # Use updated get_model_source for origin
+        print(
+            f"[DEBUG get_type_source] Detected generic type. Origin: {origin}, Args: {args}"
+        )
+        origin_source = get_model_source(origin, notebook_code)
         args_sources = []

-        #
-        for arg in
-
+        # Recursively get sources for all type arguments
+        for arg in args:
+            print(
+                f"[DEBUG get_type_source] Recursively getting source for generic arg #{arg}"
+            )
+            arg_source = get_type_source(arg, notebook_code)
             if arg_source:
                 args_sources.append(arg_source)

-
+        # Return tuple only if origin source or some arg sources were found
+        if origin_source or args_sources:
+            print(
+                f"[DEBUG get_type_source] Returning tuple source for generic: {type_obj_str}"
+            )
+            return (origin_source, args_sources)
+
+    # Fallback if not a class or recognizable generic alias
+    # Try get_model_source as a last resort for unknown types
+    fallback_source = get_model_source(type_obj, notebook_code)
+    if fallback_source:
+        print(
+            f"[DEBUG get_type_source] Using fallback get_model_source for: {type_obj_str}"
+        )
+        return fallback_source

+    print(f"[DEBUG get_type_source] Failed to get source for: {type_obj_str}")
     return None


@@ -75,297 +353,435 @@ def processor(
     no_delete: bool = False,
     include: Optional[List[Any]] = None,
 ):
-
-
-
-
-
-
-
-
-
-        platform: Optional compute platform to run on
-        accelerators: Optional list of accelerator types
-        namespace: Optional namespace for the processor
-        labels: Optional labels to apply to the processor
-        env: Optional environment variables
-        volumes: Optional volume mounts
-        resources: Optional resource requirements
-        meters: Optional metering configuration
-        authz: Optional authorization configuration
-        python_cmd: Optional python command to use
-        no_delete: Whether to prevent deleting the processor on updates
-        include: Optional list of Python objects whose source code should be included
-    """
+    def decorator(
+        func: Callable[[Any], Any],
+    ) -> Processor:
+        # Moved init print here
+        print(
+            f"[DEBUG Decorator Init] @processor decorating function '{func.__name__}'"
+        )
+        all_env = env or []
+        processor_name = func.__name__

-
-
+        # --- Determine Environment and Get Notebook Code ---
+        print("[DEBUG Decorator] Determining execution environment...")
+        in_jupyter = is_jupyter_notebook()
+        notebook_code = None
+        if in_jupyter:
+            print("[DEBUG Decorator] Jupyter environment detected.")
+            notebook_code = get_notebook_executed_code()
+            if not notebook_code:
+                print(
+                    "[DEBUG Decorator] Warning: Failed to get Jupyter execution history. Will attempt dill."
+                )
+            else:
+                print(
+                    f"[DEBUG Decorator] Retrieved notebook history (length: {len(notebook_code)})."
+                )
+        else:
+            print("[DEBUG Decorator] Non-Jupyter environment detected.")
+        # --- End Environment Determination ---
+
+        # --- Process Manually Included Objects ---
+        included_sources: Dict[Any, Any] = {}
+        if include:
+            print(f"[DEBUG Decorator] Processing manually included objects: {include}")
+            for i, obj in enumerate(include):
+                obj_name_str = getattr(obj, "__name__", str(obj))
+                print(
+                    f"[DEBUG Decorator] Getting source for manually included object: {obj_name_str}"
+                )
+                obj_source = get_model_source(obj, notebook_code)
+                if obj_source:
+                    included_sources[obj] = obj_source
+                    env_key = f"INCLUDED_OBJECT_{i}_SOURCE"
+                    all_env.append(V1EnvVar(key=env_key, value=obj_source))
+                    print(
+                        f"[DEBUG Decorator] Added source to env for included obj: {obj_name_str}"
+                    )
+                else:
+                    print(
+                        f"Warning: Could not retrieve source for manually included object: {obj_name_str}"
+                    )
+            print(
+                f"[DEBUG Decorator] Finished processing included objects. Sources found: {len(included_sources)}"
+            )
+        else:
+            print("[DEBUG Decorator] No manually included objects specified.")
+        # --- End Manually Included Objects ---
+
+        # --- Validate Function Signature and Types ---
+        print(
+            f"[DEBUG Decorator] Validating signature and type hints for {processor_name}..."
+        )
         sig = inspect.signature(func)
         params = list(sig.parameters.values())
-
         if len(params) != 1:
-            raise TypeError(f"
+            raise TypeError(f"{processor_name} must take one parameter")
+
+        try:
+            type_hints = get_type_hints(func, globalns=func.__globals__, localns=None)
+            print(f"[DEBUG Decorator] Raw type hints: {type_hints}")
+        except Exception as e:
+            print(f"[DEBUG Decorator] Error getting type hints: {e}")
+            raise TypeError(
+                f"Could not evaluate type hints for {processor_name}: {e}"
+            ) from e

-        # Check parameter type
-        type_hints = get_type_hints(func)
         param_name = params[0].name
         if param_name not in type_hints:
             raise TypeError(
-                f"
+                f"{processor_name} parameter '{param_name}' must have type hint"
             )
-
         param_type = type_hints[param_name]
+        param_type_str_repr = str(param_type)  # Use string for regex
+        print(
+            f"[DEBUG Decorator] Parameter '{param_name}' type hint: {param_type_str_repr}"
+        )

-
+        if "return" not in type_hints:
+            raise TypeError(f"{processor_name} must have return type hint")
+        return_type = type_hints["return"]
+        print(f"[DEBUG Decorator] Return type hint: {return_type}")
+
+        # --- Determine Input Type (StreamMessage, ContentType) ---
+        print(
+            f"[DEBUG Decorator] Determining input type structure for param type hint: {param_type_str_repr}"
+        )
+        origin = get_origin(param_type)
+        args = get_args(param_type)
+        print(f"[DEBUG Decorator] get_origin result: {origin}, get_args result: {args}")
         is_stream_message = False
         content_type = None

-        #
-        if (
-
-            and
+        # Check 1: Standard introspection
+        if origin is V1StreamMessage or (
+            origin is not None
+            and origin.__name__ == V1StreamMessage.__name__
+            and origin.__module__ == V1StreamMessage.__module__
         ):
+            print(
+                "[DEBUG Decorator] Input type identified as V1StreamMessage via get_origin."
+            )
             is_stream_message = True
-
-
-
-
-
+            if args:
+                content_type = args[0]
+                print(
+                    f"[DEBUG Decorator] Content type extracted via get_args: {content_type}"
+                )
+            else:
+                print(
+                    "[DEBUG Decorator] V1StreamMessage detected, but no generic arguments found via get_args."
+                )
+        # Check 2: Direct type check
+        elif isinstance(param_type, type) and param_type is V1StreamMessage:
+            print(
+                "[DEBUG Decorator] Input type identified as direct V1StreamMessage type."
+            )
             is_stream_message = True
-
-
-
-
-            )
-            if not issubclass(actual_type, BaseModel):
-                raise TypeError(
-                    f"Parameter {param_name} in function {func.__name__} must be a BaseModel"
+        # Check 3: Regex fallback on string representation
+        elif origin is None:
+            print(
+                f"[DEBUG Decorator] get_origin failed. Attempting regex fallback on type string: '{param_type_str_repr}'"
             )
-
-
-
-
-
+            # Use param_type_str_repr in match calls
+            generic_match = re.match(
+                r"^<class '(?:[a-zA-Z0-9_.]+\.)?V1StreamMessage\[(.+?)\]'>$",
+                param_type_str_repr,
+            )
+            if generic_match:
+                print(
+                    "[DEBUG Decorator] Regex matched generic V1StreamMessage pattern!"
+                )
+                is_stream_message = True
+                content_type_name_str = generic_match.group(1).strip()
+                print(
+                    f"[DEBUG Decorator] Captured content type name via regex: '{content_type_name_str}'"
+                )
+                try:
+                    resolved_type = eval(content_type_name_str, func.__globals__)
+                    content_type = resolved_type
+                    print(
+                        f"[DEBUG Decorator] Successfully resolved content type name '{content_type_name_str}' to type: {content_type}"
+                    )
+                except NameError:
+                    print(
+                        f"[DEBUG Decorator] Warning: Regex found content type name '{content_type_name_str}', but it's not defined in function's globals. Consumer might fail."
+                    )
+                    content_type = None
+                except Exception as e:
+                    print(
+                        f"[DEBUG Decorator] Warning: Error evaluating content type name '{content_type_name_str}': {e}"
+                    )
+                    content_type = None
+            else:
+                # Use param_type_str_repr in match calls
+                simple_match = re.match(
+                    r"^<class '(?:[a-zA-Z0-9_.]+\.)?V1StreamMessage'>$",
+                    param_type_str_repr,
+                )
+                if simple_match:
+                    print(
+                        "[DEBUG Decorator] Regex identified direct V1StreamMessage (no generic) from string."
+                    )
+                    is_stream_message = True
+                else:
+                    print(
+                        f"[DEBUG Decorator] Regex did not match V1StreamMessage pattern for string '{param_type_str_repr}'. Assuming not StreamMessage."
+                    )
+        else:
+            print(
+                f"[DEBUG Decorator] Input parameter '{param_name}' type ({param_type_str_repr}) identified as non-StreamMessage type (origin was not None and not V1StreamMessage)."
             )

-
-
-            return_type.__origin__
-            if hasattr(return_type, "__origin__")
-            else return_type
+        print(
+            f"[DEBUG Decorator] Final Input Type Determination: is_stream_message={is_stream_message}, content_type={content_type}"
         )
-
-
-
+        # --- End Input Type Determination ---
+
+        # --- Validate Types are BaseModel ---
+        print(
+            "[DEBUG Decorator] Validating parameter and return types are BaseModel subclasses..."
+        )
+
+        def check_basemodel(type_to_check, desc):
+            print(
+                f"[DEBUG Decorator] check_basemodel: Checking {desc} - Type: {type_to_check}"
             )
+            if not type_to_check:
+                print(
+                    f"[DEBUG Decorator] check_basemodel: Skipping check for {desc} (type is None/empty)."
+                )
+                return
+            actual_type = get_origin(type_to_check) or type_to_check
+            print(
+                f"[DEBUG Decorator] check_basemodel: Actual type for {desc}: {actual_type}"
+            )
+            if isinstance(actual_type, type) and not issubclass(actual_type, BaseModel):
+                print(
+                    f"[DEBUG Decorator] check_basemodel: Error - {desc} effective type ({actual_type.__name__}) is not a BaseModel subclass."
+                )
+                raise TypeError(
+                    f"{desc} effective type ({actual_type.__name__}) must be BaseModel subclass"
+                )
+            elif not isinstance(actual_type, type):
+                print(
+                    f"[DEBUG Decorator] check_basemodel: Warning - {desc} effective type '{actual_type}' is not a class. Cannot verify BaseModel subclass."
+                )
+            else:
+                print(
+                    f"[DEBUG Decorator] check_basemodel: OK - {desc} effective type ({actual_type.__name__}) is a BaseModel subclass."
+                )

-
-
+        effective_param_type = (
+            content_type
+            if is_stream_message and content_type
+            else param_type
+            if not is_stream_message
+            else None
+        )
+        check_basemodel(effective_param_type, f"Parameter '{param_name}'")
+        check_basemodel(return_type, "Return value")
+        print("[DEBUG Decorator] Type validation complete.")
+        # --- End Type Validation ---

-        #
-
+        # --- Get Function Source ---
+        print(
+            f"[DEBUG Decorator] Getting source code for function '{processor_name}'..."
+        )
+        function_source = None
+        explicit_source = getattr(func, _NEBU_EXPLICIT_SOURCE_ATTR, None)

-
-        try:
-            raw_function_source = inspect.getsource(func)
+        if explicit_source:
             print(
-                f"[DEBUG Decorator]
+                f"[DEBUG Decorator] Using explicit source (@include) for function {processor_name}"
             )
-
-
-            dedented_function_source = textwrap.dedent(raw_function_source)
+            function_source = explicit_source
+        elif in_jupyter and notebook_code:
             print(
-                f"[DEBUG Decorator]
+                f"[DEBUG Decorator] Attempting notebook history extraction for function '{processor_name}'..."
             )
-
-
-
-
-
-
-
-                if stripped_line.startswith("def "):
-                    func_def_index = i
-                    break
-                # Simply continue if it's not the def line.
-                # This skips decorators and their arguments, regardless of multi-line formatting.
-                continue
-
-            if func_def_index != -1:
-                # Keep lines from the 'def' line onwards
-                function_source = "\n".join(
-                    lines[func_def_index:]
-                )  # Use \n for env var
+            function_source = extract_definition_source_from_string(
+                notebook_code, processor_name, ast.FunctionDef
+            )
+            if function_source:
+                print(
+                    f"[DEBUG Decorator] Found function '{processor_name}' source in notebook history."
+                )
             else:
-
-
-                    f"Could not find function definition 'def' in source for {func.__name__}"
+                print(
+                    f"[DEBUG Decorator] Failed to find function '{processor_name}' in notebook history, falling back to dill."
                 )
-
+        if function_source is None:
             print(
-                f"[DEBUG Decorator]
+                f"[DEBUG Decorator] Using dill fallback for function '{processor_name}'..."
             )
+            try:
+                raw_function_source = dill.source.getsource(func)
+                function_source = textwrap.dedent(raw_function_source)
+                print(
+                    f"[DEBUG Decorator] Successfully got source via dill for '{processor_name}'."
+                )
+            except (IOError, TypeError, OSError) as e:
+                print(
+                    f"[DEBUG Decorator] Dill fallback failed for '{processor_name}': {e}"
+                )
+                if not (in_jupyter and notebook_code):
+                    raise ValueError(
+                        f"Could not retrieve source for '{processor_name}' using dill: {e}"
+                    ) from e

-
-            print(f"[DEBUG Decorator] Error getting source for {func.__name__}: {e}")
+        if function_source is None:  # Final check after all attempts
             raise ValueError(
-                f"
-            )
+                f"Failed to obtain source code for function '{processor_name}' using any method."
+            )

-
+        print(f"[DEBUG Decorator] Final function source obtained for '{processor_name}' (len: {len(function_source)}). Source starts:\n-------\
+{function_source[:250]}...\n-------")
+        # --- End Function Source ---
+
+        # --- Get Model Sources ---
+        print("[DEBUG Decorator] Getting model sources...")
         input_model_source = None
         output_model_source = None
         content_type_source = None
+        print("[DEBUG Decorator] Getting base V1StreamMessage source...")
+        stream_message_source = get_type_source(V1StreamMessage, notebook_code)

-        # Get the V1StreamMessage class source
-        stream_message_source = get_model_source(V1StreamMessage)
-
-        # Get input model source
         if is_stream_message:
-
+            print(
+                f"[DEBUG Decorator] Input is StreamMessage. Content type: {content_type}"
+            )
             if content_type:
-
-
-                input_model_source = get_type_source(param_type)
-
-        # Get output model source
-        output_model_source = get_type_source(return_type)
-
-        # Add function source code to environment variables
-        print(
-            f"[DEBUG Decorator] Setting FUNCTION_SOURCE: {function_source[:100]}..."
-        )  # Print first 100 chars
-        all_env.append(V1EnvVar(key="FUNCTION_SOURCE", value=function_source))
-        print(f"[DEBUG Decorator] Setting FUNCTION_NAME: {func.__name__}")
-        all_env.append(V1EnvVar(key="FUNCTION_NAME", value=func.__name__))
-
-        # Add model source codes
-        if input_model_source:
-            if isinstance(input_model_source, tuple):
-                all_env.append(
-                    V1EnvVar(key="INPUT_MODEL_SOURCE", value=input_model_source[0])
+                print(
+                    f"[DEBUG Decorator] Getting source for content_type: {content_type}"
                 )
-
-
-
-
+                content_type_source = get_type_source(content_type, notebook_code)
+                if content_type_source is None:
+                    print(
+                        f"Warning: Failed to get source for content_type: {content_type}"
                     )
-
-
-
+        else:  # Not a stream message
+            print(
+                f"[DEBUG Decorator] Input is not StreamMessage. Getting source for param_type: {param_type}"
+            )
+            input_model_source = get_type_source(param_type, notebook_code)
+            if input_model_source is None:
+                print(
+                    f"Warning: Failed to get source for input param_type: {param_type}"
                 )

-
-
-
-
-            )
-            # Add generic args sources
-            for i, arg_source in enumerate(output_model_source[1]):
-                all_env.append(
-                    V1EnvVar(key=f"OUTPUT_MODEL_ARG_{i}_SOURCE", value=arg_source)
-                )
-        else:
-            all_env.append(
-                V1EnvVar(key="OUTPUT_MODEL_SOURCE", value=output_model_source)
-            )
+        print(f"[DEBUG Decorator] Getting source for return_type: {return_type}")
+        output_model_source = get_type_source(return_type, notebook_code)
+        if output_model_source is None:
+            print(f"Warning: Failed to get source for return_type: {return_type}")

-
-
-
-
+        print(
+            f"[DEBUG Decorator] Source Result - Content Type: {'Found' if content_type_source else 'Not Found or N/A'}"
+        )
+        print(
+            f"[DEBUG Decorator] Source Result - Input Model (non-stream): {'Found' if input_model_source else 'Not Found or N/A'}"
+        )
+        print(
+            f"[DEBUG Decorator] Source Result - Output Model: {'Found' if output_model_source else 'Not Found'}"
+        )
+        print(
+            f"[DEBUG Decorator] Source Result - Base StreamMessage: {'Found' if stream_message_source else 'Not Found'}"
+        )
+        # --- End Model Sources ---

-
-
-
-
-
-
-
-
-
-
-            else:
-                all_env.append(
-                    V1EnvVar(key="CONTENT_TYPE_SOURCE", value=content_type_source)
+        # --- Populate Environment Variables ---
+        print("[DEBUG Decorator] Populating environment variables...")
+        all_env.append(V1EnvVar(key="FUNCTION_SOURCE", value=function_source))
+        all_env.append(V1EnvVar(key="FUNCTION_NAME", value=processor_name))
+
+        def add_source_to_env(key_base: str, source: Any):
+            print(f"[DEBUG Decorator] add_source_to_env: Processing key '{key_base}'")
+            if not source:
+                print(
+                    f"[DEBUG Decorator] add_source_to_env: No source for '{key_base}', skipping."
                 )
+                return

-
-
-
-
-
-
-
-
-
+            if isinstance(source, tuple):
+                origin_src, arg_srcs = source
+                print(
+                    f"[DEBUG Decorator] add_source_to_env: '{key_base}' is tuple source. Origin found: {bool(origin_src)}, Num args: {len(arg_srcs)}"
+                )
+                if origin_src and isinstance(origin_src, str):
+                    all_env.append(V1EnvVar(key=f"{key_base}_SOURCE", value=origin_src))
+                    print(f"[DEBUG Decorator] Added env var {key_base}_SOURCE (origin)")
+                for i, arg_src in enumerate(arg_srcs):
+                    if isinstance(arg_src, str):
                         all_env.append(
-                            V1EnvVar(
-
-
+                            V1EnvVar(key=f"{key_base}_ARG_{i}_SOURCE", value=arg_src)
+                        )
+                        print(
+                            f"[DEBUG Decorator] Added env var {key_base}_ARG_{i}_SOURCE"
                         )
-
+                    elif isinstance(arg_src, tuple):
+                        arg_origin_src, _ = arg_src
+                        if arg_origin_src and isinstance(arg_origin_src, str):
                             all_env.append(
                                 V1EnvVar(
-                                    key=f"
-                                    value=
+                                    key=f"{key_base}_ARG_{i}_SOURCE",
+                                    value=arg_origin_src,
                                 )
                             )
-
-
-                                V1EnvVar(
-                                    key=f"INCLUDED_OBJECT_{i}_SOURCE", value=obj_source
+                            print(
+                                f"[DEBUG Decorator] Added env var {key_base}_ARG_{i}_SOURCE (nested origin)"
                             )
+                        else:
+                            print(
+                                f"[DEBUG Decorator] Skipping complex/non-string nested arg origin for {key_base}_ARG_{i}"
+                            )
+                    else:
+                        print(
+                            f"[DEBUG Decorator] Skipping complex/non-string arg source for {key_base}_ARG_{i}"
                         )
-
-
-
-
-
+            elif isinstance(source, str):
+                all_env.append(V1EnvVar(key=f"{key_base}_SOURCE", value=source))
+                print(f"[DEBUG Decorator] Added env var {key_base}_SOURCE (string)")
+            else:
+                print(
+                    f"[DEBUG Decorator] Warning: Unknown source type for {key_base}: {type(source)}. Skipping."
+                )

-
-
-
-
-                    value=param_type.__name__
-                    if hasattr(param_type, "__name__")
-                    else str(param_type),
-                )
-            )
-            all_env.append(
-                V1EnvVar(
-                    key="RETURN_TYPE_NAME",
-                    value=return_type.__name__
-                    if hasattr(return_type, "__name__")
-                    else str(return_type),
-                )
-            )
-            all_env.append(V1EnvVar(key="IS_STREAM_MESSAGE", value=str(is_stream_message)))
+        add_source_to_env("INPUT_MODEL", input_model_source)
+        add_source_to_env("OUTPUT_MODEL", output_model_source)
+        add_source_to_env("CONTENT_TYPE", content_type_source)
+        add_source_to_env("STREAM_MESSAGE", stream_message_source)

-
+        print("[DEBUG Decorator] Adding type info env vars...")
+        all_env.append(V1EnvVar(key="PARAM_TYPE_STR", value=param_type_str_repr))
+        all_env.append(V1EnvVar(key="RETURN_TYPE_STR", value=str(return_type)))
+        all_env.append(V1EnvVar(key="IS_STREAM_MESSAGE", value=str(is_stream_message)))
+        if content_type and hasattr(content_type, "__name__"):
             all_env.append(
-                V1EnvVar(
-                    key="CONTENT_TYPE_NAME",
-                    value=content_type.__name__
-                    if hasattr(content_type, "__name__")
-                    else str(content_type),
-                )
+                V1EnvVar(key="CONTENT_TYPE_NAME", value=content_type.__name__)
             )
-
-        # We still add the module for reference, but we won't rely on importing it
         all_env.append(V1EnvVar(key="MODULE_NAME", value=func.__module__))
+        print("[DEBUG Decorator] Finished populating environment variables.")
+        # --- End Environment Variables ---

-        #
+        # --- Final Setup ---
+        print("[DEBUG Decorator] Preparing final Processor object...")
         metadata = V1ResourceMetaRequest(
             name=processor_name, namespace=namespace, labels=labels
         )
-
-        # Create the command to run the consumer directly
         consumer_command = f"{python_cmd} -m nebu.processors.consumer"
+        setup_commands = [
+            f"{python_cmd} -m pip install dill pydantic redis nebu",  # Base deps
+        ]
+        if setup_script:
+            print("[DEBUG Decorator] Adding setup script to command.")
+            setup_commands.append(f"\n{setup_script}\n")
+        setup_commands.append(consumer_command)
+        final_command = "\n".join(setup_commands)
+        print(
+            f"[DEBUG Decorator] Final container command:\n-------\n{final_command}\n-------"
+        )

-        final_command = f"{python_cmd} -m pip install redis nebu\n\n{setup_script}\n\n{consumer_command}"
-
-        # Create the V1ContainerRequest
         container_request = V1ContainerRequest(
             image=image,
             command=final_command,
@@ -379,23 +795,29 @@ def processor(
             platform=platform,
             metadata=metadata,
         )
-        print("
+        print("[DEBUG Decorator] Final Container Request Env Vars (Summary):")
+        for env_var in all_env:
+            if "SOURCE" in env_var.key:
+                print(f"[DEBUG Decorator] {env_var.key}: <source code present>")
+            else:
+                print(f"[DEBUG Decorator] {env_var.key}: {env_var.value}")

-        # Create the processor instance
         processor_instance = Processor(
             name=processor_name,
             stream=processor_name,
             namespace=namespace,
             labels=labels,
             container=container_request,
-            schema_=None,
+            schema_=None,
             common_schema=None,
             min_replicas=min_replicas,
             max_replicas=max_replicas,
             scale_config=scale,
             no_delete=no_delete,
         )
-
+        print(
+            f"[DEBUG Decorator] Processor instance '{processor_name}' created successfully."
+        )
         return processor_instance

     return decorator
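Most of the decorate.py rewrite deals with recovering source code inside Jupyter, where inspect and dill often have no file to read: executed cells are joined with a separator, and the last definition of a given name is pulled back out of that text with the ast module. The standalone sketch below illustrates only that extraction idea; the function name, separator constant, and toy history string are illustrative and not nebu's API.

```python
# Sketch: find the last complete definition of `name` in concatenated cell source.
import ast
from typing import Optional

CELL_SEP = "#<NEBU_CELL_SEP>#"


def last_definition_source(history: str, name: str, kind: type = ast.FunctionDef) -> Optional[str]:
    # Walk cells from newest to oldest so a re-executed definition wins.
    for cell in reversed(history.split(CELL_SEP)):
        cell = cell.strip()
        if not cell:
            continue
        try:
            tree = ast.parse(cell)
        except SyntaxError:
            continue  # skip cells with magics or partial code
        for node in ast.walk(tree):
            if isinstance(node, kind) and getattr(node, "name", None) == name:
                # Exact text of the definition, including decorators (Python 3.8+).
                return ast.get_source_segment(cell, node)
    return None


history = f"def f(x):\n    return x\n{CELL_SEP}\ndef f(x):\n    return x * 2\n"
print(last_definition_source(history, "f"))  # prints the second (latest) definition
```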
{nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/METADATA
CHANGED
@@ -1,13 +1,16 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.14
+Version: 0.1.17
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
 License-File: LICENSE
+Requires-Dist: dill>=0.3.8
 Requires-Dist: openai>=1.68.2
 Requires-Dist: pydantic>=2.10.6
+Requires-Dist: pysocks>=1.7.1
 Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: redis[socks]>=5.0
 Requires-Dist: requests>=2.32.3
 Dynamic: license-file

{nebu-0.1.14.dist-info → nebu-0.1.17.dist-info}/RECORD
CHANGED
@@ -3,18 +3,18 @@ nebu/auth.py,sha256=rApCd-7_c3GpIb7gjCB79rR7SOcmkG7MmaTE6zMbvr0,1125
 nebu/config.py,sha256=XBY7uKgcJX9d1HGxqqpx87o_9DuF3maUlUnKkcpUrKU,4565
 nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
 nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,13502
-nebu/containers/decorator.py,sha256=
+nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
 nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
-nebu/processors/consumer.py,sha256=
-nebu/processors/decorate.py,sha256=
+nebu/processors/consumer.py,sha256=0HJxRLoeRdN4xY6bjIxqr5bD5JpFSyKb5s-eS5oTy9s,16063
+nebu/processors/decorate.py,sha256=BlpRF9u-1q9IS-UDn0XFzoYIBqEU4Xkjkoge394exJs,34786
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=GvnI8UJrQSjHo2snP07cPfisCH90cEGTY-PZV5_AtXI,3654
 nebu/processors/processor.py,sha256=oy2YdI-cy6qQWxrZhpZahJV46oWZlu_Im-jm811R_oo,9667
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
+nebu-0.1.17.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.17.dist-info/METADATA,sha256=m9oiMDrMt5x1x7wa51dsxwgzj-tUcX_hZrQrGN8NtyE,1678
+nebu-0.1.17.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nebu-0.1.17.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.17.dist-info/RECORD,,
|
File without changes
|
File without changes
|