nebu 0.1.7__tar.gz → 0.1.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nebu-0.1.7/src/nebu.egg-info → nebu-0.1.9}/PKG-INFO +1 -1
- {nebu-0.1.7 → nebu-0.1.9}/pyproject.toml +2 -1
- nebu-0.1.9/src/nebu/auth.py +35 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/containers/models.py +1 -0
- nebu-0.1.9/src/nebu/processors/consumer.py +414 -0
- nebu-0.1.9/src/nebu/processors/decorate.py +360 -0
- nebu-0.1.9/src/nebu/processors/default.py +18 -0
- nebu-0.1.9/src/nebu/processors/models.py +155 -0
- nebu-0.1.9/src/nebu/processors/processor.py +277 -0
- {nebu-0.1.7 → nebu-0.1.9/src/nebu.egg-info}/PKG-INFO +1 -1
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu.egg-info/SOURCES.txt +4 -0
- nebu-0.1.7/src/nebu/processors/models.py +0 -67
- nebu-0.1.7/src/nebu/processors/processor.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/LICENSE +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/README.md +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/setup.cfg +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/__init__.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/config.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/containers/container.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/containers/decorator.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/containers/server.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/meta.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/redis/models.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu/services/service.py +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu.egg-info/dependency_links.txt +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu.egg-info/requires.txt +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/src/nebu.egg-info/top_level.txt +0 -0
- {nebu-0.1.7 → nebu-0.1.9}/tests/test_containers.py +0 -0
{nebu-0.1.7 → nebu-0.1.9}/pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [project]
 name = "nebu"
-version = "0.1.7"
+version = "0.1.9"
 description = "A globally distributed container runtime"
 readme = "README.md"
 requires-python = ">=3.10.14"
@@ -15,6 +15,7 @@ dependencies = [
 dev = [
     "ipykernel>=6.29.5",
     "pytest>=8.3.5",
+    "trl>=0.16.1",
 ]

 [tool.pyright]
```
nebu-0.1.9/src/nebu/auth.py (new file)

```diff
@@ -0,0 +1,35 @@
+from typing import Dict, Optional
+
+import requests
+from pydantic import BaseModel
+
+from nebu.config import GlobalConfig
+
+
+class V1UserProfile(BaseModel):
+    email: str
+    display_name: Optional[str] = None
+    handle: Optional[str] = None
+    picture: Optional[str] = None
+    organization: Optional[str] = None
+    role: Optional[str] = None
+    external_id: Optional[str] = None
+    actor: Optional[str] = None
+    # structure is {"org_id": {"org_name": <name>, "org_role": <role>}}
+    organizations: Optional[Dict[str, Dict[str, str]]] = None
+    created: Optional[int] = None
+    updated: Optional[int] = None
+    token: Optional[str] = None
+
+
+def get_user_profile(api_key: str) -> V1UserProfile:
+    config = GlobalConfig.read()
+    current_server_config = config.get_current_server_config()
+    if current_server_config is None:
+        raise ValueError("No current server config found")
+    url = f"{current_server_config.server}/v1/users/me"
+
+    response = requests.get(url, headers={"Authorization": f"Bearer {api_key}"})
+    response.raise_for_status()
+
+    return V1UserProfile.model_validate(response.json())
```
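For orientation, here is a minimal usage sketch of the new `get_user_profile` helper. It assumes nebu 0.1.9 is installed, a current server is already configured via `GlobalConfig`, and that the API key comes from an environment variable; the variable name is illustrative and not defined by the package.

```python
# Hypothetical caller of the new auth helper (illustrative only).
import os

from nebu.auth import get_user_profile

api_key = os.environ["NEBU_API_KEY"]  # assumed env var name, not defined by nebu
profile = get_user_profile(api_key)   # raises via raise_for_status() on HTTP errors

print(profile.email, profile.handle, profile.organizations)
```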
nebu-0.1.9/src/nebu/processors/consumer.py (new file)

```diff
@@ -0,0 +1,414 @@
+#!/usr/bin/env python3
+import json
+import os
+import sys
+import time
+import traceback
+from datetime import datetime
+from typing import Dict, TypeVar
+
+import redis
+
+# Define TypeVar for generic models
+T = TypeVar("T")
+
+# Get function and model source code and create them dynamically
+try:
+    function_source = os.environ.get("FUNCTION_SOURCE")
+    function_name = os.environ.get("FUNCTION_NAME")
+    stream_message_source = os.environ.get("STREAM_MESSAGE_SOURCE")
+    input_model_source = os.environ.get("INPUT_MODEL_SOURCE")
+    output_model_source = os.environ.get("OUTPUT_MODEL_SOURCE")
+    content_type_source = os.environ.get("CONTENT_TYPE_SOURCE")
+    is_stream_message = os.environ.get("IS_STREAM_MESSAGE") == "True"
+    param_type_name = os.environ.get("PARAM_TYPE_NAME")
+    return_type_name = os.environ.get("RETURN_TYPE_NAME")
+    content_type_name = os.environ.get("CONTENT_TYPE_NAME")
+
+    # Check for generic type arguments
+    input_model_args = []
+    output_model_args = []
+    content_type_args = []
+
+    # Get input model arg sources
+    i = 0
+    while True:
+        arg_source = os.environ.get(f"INPUT_MODEL_ARG_{i}_SOURCE")
+        if arg_source:
+            input_model_args.append(arg_source)
+            i += 1
+        else:
+            break
+
+    # Get output model arg sources
+    i = 0
+    while True:
+        arg_source = os.environ.get(f"OUTPUT_MODEL_ARG_{i}_SOURCE")
+        if arg_source:
+            output_model_args.append(arg_source)
+            i += 1
+        else:
+            break
+
+    # Get content type arg sources
+    i = 0
+    while True:
+        arg_source = os.environ.get(f"CONTENT_TYPE_ARG_{i}_SOURCE")
+        if arg_source:
+            content_type_args.append(arg_source)
+            i += 1
+        else:
+            break
+
+    # Get included object sources
+    included_object_sources = []
+    i = 0
+    while True:
+        obj_source = os.environ.get(f"INCLUDED_OBJECT_{i}_SOURCE")
+        if obj_source:
+            args = []
+            j = 0
+            while True:
+                arg_source = os.environ.get(f"INCLUDED_OBJECT_{i}_ARG_{j}_SOURCE")
+                if arg_source:
+                    args.append(arg_source)
+                    j += 1
+                else:
+                    break
+            included_object_sources.append((obj_source, args))
+            i += 1
+        else:
+            break
+
+    if not function_source or not function_name:
+        print("FUNCTION_SOURCE or FUNCTION_NAME environment variables not set")
+        sys.exit(1)
+
+    # Create a local namespace for executing the function
+    local_namespace = {}
+
+    # Include pydantic BaseModel and typing tools for type annotations
+    exec("from pydantic import BaseModel, Field", local_namespace)
+    exec(
+        "from typing import Optional, List, Dict, Any, Generic, TypeVar",
+        local_namespace,
+    )
+    exec("T = TypeVar('T')", local_namespace)
+
+    # First try to import the module to get any needed dependencies
+    # This is a fallback in case the module is available
+    module_name = os.environ.get("MODULE_NAME")
+    try:
+        if module_name:
+            exec(f"import {module_name}", local_namespace)
+            print(f"Successfully imported module {module_name}")
+    except Exception as e:
+        print(f"Warning: Could not import module {module_name}: {e}")
+        print(
+            "This is expected if running in a Jupyter notebook. Will use dynamic execution."
+        )
+
+    # Define the models
+    # First define stream message class if needed
+    if stream_message_source:
+        try:
+            exec(stream_message_source, local_namespace)
+            print("Successfully defined V1StreamMessage class")
+        except Exception as e:
+            print(f"Error defining V1StreamMessage: {e}")
+            traceback.print_exc()
+
+    # Define content type if available
+    if content_type_source:
+        try:
+            exec(content_type_source, local_namespace)
+            print(f"Successfully defined content type {content_type_name}")
+
+            # Define any content type args
+            for arg_source in content_type_args:
+                try:
+                    exec(arg_source, local_namespace)
+                    print("Successfully defined content type argument")
+                except Exception as e:
+                    print(f"Error defining content type argument: {e}")
+                    traceback.print_exc()
+        except Exception as e:
+            print(f"Error defining content type: {e}")
+            traceback.print_exc()
+
+    # Define input model if different from stream message
+    if input_model_source and (
+        not is_stream_message or input_model_source != stream_message_source
+    ):
+        try:
+            exec(input_model_source, local_namespace)
+            print(f"Successfully defined input model {param_type_name}")
+
+            # Define any input model args
+            for arg_source in input_model_args:
+                try:
+                    exec(arg_source, local_namespace)
+                    print("Successfully defined input model argument")
+                except Exception as e:
+                    print(f"Error defining input model argument: {e}")
+                    traceback.print_exc()
+        except Exception as e:
+            print(f"Error defining input model: {e}")
+            traceback.print_exc()
+
+    # Define output model
+    if output_model_source:
+        try:
+            exec(output_model_source, local_namespace)
+            print(f"Successfully defined output model {return_type_name}")
+
+            # Define any output model args
+            for arg_source in output_model_args:
+                try:
+                    exec(arg_source, local_namespace)
+                    print("Successfully defined output model argument")
+                except Exception as e:
+                    print(f"Error defining output model argument: {e}")
+                    traceback.print_exc()
+        except Exception as e:
+            print(f"Error defining output model: {e}")
+            traceback.print_exc()
+
+    # Execute included object sources
+    for i, (obj_source, args_sources) in enumerate(included_object_sources):
+        try:
+            exec(obj_source, local_namespace)
+            print(f"Successfully executed included object {i} base source")
+            for j, arg_source in enumerate(args_sources):
+                try:
+                    exec(arg_source, local_namespace)
+                    print(f"Successfully executed included object {i} arg {j} source")
+                except Exception as e:
+                    print(f"Error executing included object {i} arg {j} source: {e}")
+                    traceback.print_exc()
+        except Exception as e:
+            print(f"Error executing included object {i} base source: {e}")
+            traceback.print_exc()
+
+    # Finally, execute the function code
+    try:
+        exec(function_source, local_namespace)
+        target_function = local_namespace[function_name]
+        print(f"Successfully loaded function {function_name}")
+    except Exception as e:
+        print(f"Error creating function from source: {e}")
+        traceback.print_exc()
+        sys.exit(1)
+
+except Exception as e:
+    print(f"Error setting up function: {e}")
+    traceback.print_exc()
+    sys.exit(1)
+
+# Get Redis connection parameters from environment
+REDIS_URL = os.environ.get("REDIS_URL", "")
+REDIS_CONSUMER_GROUP = os.environ.get("REDIS_CONSUMER_GROUP")
+REDIS_STREAM = os.environ.get("REDIS_STREAM")
+
+if not all([REDIS_URL, REDIS_CONSUMER_GROUP, REDIS_STREAM]):
+    print("Missing required Redis environment variables")
+    sys.exit(1)
+
+# Connect to Redis
+try:
+    r = redis.from_url(REDIS_URL)
+    redis_info = REDIS_URL.split("@")[-1] if "@" in REDIS_URL else REDIS_URL
+    print(f"Connected to Redis at {redis_info}")
+except Exception as e:
+    print(f"Failed to connect to Redis: {e}")
+    traceback.print_exc()
+    sys.exit(1)
+
+# Create consumer group if it doesn't exist
+try:
+    r.xgroup_create(REDIS_STREAM, REDIS_CONSUMER_GROUP, id="0", mkstream=True)
+    print(f"Created consumer group {REDIS_CONSUMER_GROUP} for stream {REDIS_STREAM}")
+except redis.exceptions.ResponseError as e:
+    if "BUSYGROUP" in str(e):
+        print(f"Consumer group {REDIS_CONSUMER_GROUP} already exists")
+    else:
+        print(f"Error creating consumer group: {e}")
+        traceback.print_exc()
+
+
+# Function to process messages
+def process_message(message_id: bytes, message_data: Dict[bytes, bytes]) -> None:
+    # Initialize variables that need to be accessible in the except block
+    return_stream = None
+    user_id = None
+
+    try:
+        # Get the message content from field 'data'
+        if b"data" not in message_data:
+            print(f"Message {message_id} has no 'data' field")
+            return
+
+        # Parse the message data
+        raw_payload = json.loads(message_data[b"data"].decode("utf-8"))
+
+        # Extract fields from the Rust structure
+        # These fields are extracted for completeness and potential future use
+        _ = raw_payload.get("kind", "")  # kind
+        msg_id = raw_payload.get("id", "")  # msg_id
+        content_raw = raw_payload.get("content", {})
+        created_at = raw_payload.get("created_at", 0)  # created_at
+        return_stream = raw_payload.get("return_stream")
+        user_id = raw_payload.get("user_id")
+        orgs = raw_payload.get("organizations")  # organizations
+        handle = raw_payload.get("handle")  # handle
+        adapter = raw_payload.get("adapter")  # adapter
+
+        # Parse the content field if it's a string
+        if isinstance(content_raw, str):
+            try:
+                content = json.loads(content_raw)
+            except json.JSONDecodeError:
+                content = content_raw
+        else:
+            content = content_raw
+
+        # For StreamMessage, construct the proper input object
+        if is_stream_message and "V1StreamMessage" in local_namespace:
+            # If we have a content type, try to construct it
+            if content_type_name and content_type_name in local_namespace:
+                # Try to create the content type model first
+                try:
+                    content_model = local_namespace[content_type_name](**content)
+                    input_obj = local_namespace["V1StreamMessage"](
+                        kind=_,
+                        id=msg_id,
+                        content=content_model,
+                        created_at=created_at,
+                        return_stream=return_stream,
+                        user_id=user_id,
+                        orgs=orgs,
+                        handle=handle,
+                        adapter=adapter,
+                    )
+                except Exception as e:
+                    print(f"Error creating content type model: {e}")
+                    # Fallback to using raw content
+                    input_obj = local_namespace["V1StreamMessage"](
+                        kind=_,
+                        id=msg_id,
+                        content=content,
+                        created_at=created_at,
+                        return_stream=return_stream,
+                        user_id=user_id,
+                        orgs=orgs,
+                        handle=handle,
+                        adapter=adapter,
+                    )
+            else:
+                # Just use the raw content
+                input_obj = local_namespace["V1StreamMessage"](
+                    kind=_,
+                    id=msg_id,
+                    content=content,
+                    created_at=created_at,
+                    return_stream=return_stream,
+                    user_id=user_id,
+                    orgs=orgs,
+                    handle=handle,
+                    adapter=adapter,
+                )
+        else:
+            # Otherwise use the param type directly
+            try:
+                if param_type_name in local_namespace:
+                    input_obj = local_namespace[param_type_name](**content)
+                else:
+                    # If we can't find the exact type, just pass the content directly
+                    input_obj = content
+            except Exception as e:
+                print(f"Error creating input model: {e}, using raw content")
+                input_obj = content
+
+        # Execute the function
+        result = target_function(input_obj)
+
+        # If the result is a Pydantic model, convert to dict
+        if hasattr(result, "model_dump"):
+            result = result.model_dump()
+
+        # Prepare the response
+        response = {
+            "kind": "StreamResponseMessage",
+            "id": message_id.decode("utf-8"),
+            "content": result,
+            "status": "success",
+            "created_at": datetime.now().isoformat(),
+            "user_id": user_id,
+        }
+
+        # Send the result to the return stream
+        if return_stream:
+            r.xadd(return_stream, {"data": json.dumps(response)})
+            print(
+                f"Processed message {message_id.decode('utf-8')}, result sent to {return_stream}"
+            )
+
+        # Acknowledge the message
+        r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+
+    except Exception as e:
+        print(f"Error processing message {message_id.decode('utf-8')}: {e}")
+        traceback.print_exc()
+
+        # Prepare the error response
+        error_response = {
+            "kind": "StreamResponseMessage",
+            "id": message_id.decode("utf-8"),
+            "content": {
+                "error": str(e),
+                "traceback": traceback.format_exc(),
+            },
+            "status": "error",
+            "created_at": datetime.now().isoformat(),
+            "user_id": user_id,
+        }
+
+        # Send the error to the return stream
+        if return_stream:
+            r.xadd(return_stream, {"data": json.dumps(error_response)})
+        else:
+            r.xadd(f"{REDIS_STREAM}.errors", {"data": json.dumps(error_response)})
+
+        # Still acknowledge the message so we don't reprocess it
+        r.xack(REDIS_STREAM, REDIS_CONSUMER_GROUP, message_id)
+
+
+# Main loop
+print(f"Starting consumer for stream {REDIS_STREAM} in group {REDIS_CONSUMER_GROUP}")
+consumer_name = f"consumer-{os.getpid()}"
+
+while True:
+    try:
+        # Read from stream with blocking
+        streams = {REDIS_STREAM: ">"}  # '>' means read only new messages
+        messages = r.xreadgroup(
+            REDIS_CONSUMER_GROUP, consumer_name, streams, count=1, block=5000
+        )
+
+        if not messages:
+            # No messages received, continue waiting
+            continue
+
+        stream_name, stream_messages = messages[0]
+
+        for message_id, message_data in stream_messages:
+            process_message(message_id, message_data)
+
+    except redis.exceptions.ConnectionError as e:
+        print(f"Redis connection error: {e}")
+        time.sleep(5)  # Wait before retrying
+
+    except Exception as e:
+        print(f"Unexpected error: {e}")
+        traceback.print_exc()
+        time.sleep(1)  # Brief pause before continuing
```
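As a companion to the new consumer, here is a minimal producer-side sketch of the message shape it expects: a JSON object stored under the stream entry's `data` field with `kind`, `id`, `content`, `created_at`, `return_stream`, and `user_id`, with the result posted back as a `StreamResponseMessage` on the return stream. The Redis URL, stream names, and content payload below are illustrative assumptions, not values defined by the package.

```python
# Hypothetical producer for the consumer above (names are illustrative).
import json
import time
import uuid

import redis

r = redis.from_url("redis://localhost:6379/0")  # assumed local Redis

input_stream = "processor:input"        # must match the consumer's REDIS_STREAM
return_stream = f"return:{uuid.uuid4()}"

payload = {
    "kind": "StreamMessage",
    "id": str(uuid.uuid4()),
    "content": {"text": "hello"},       # shape depends on the processor's input model
    "created_at": int(time.time()),
    "return_stream": return_stream,
    "user_id": "user-123",
}
r.xadd(input_stream, {"data": json.dumps(payload)})

# Wait up to 30s for the consumer to post a response on the return stream.
messages = r.xread({return_stream: "0"}, count=1, block=30000)
if messages:
    _, entries = messages[0]
    _, fields = entries[0]
    response = json.loads(fields[b"data"])
    print(response["status"], response["content"])
```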