nebu 0.1.48__tar.gz → 0.1.51__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nebu-0.1.48/src/nebu.egg-info → nebu-0.1.51}/PKG-INFO +1 -1
- {nebu-0.1.48 → nebu-0.1.51}/pyproject.toml +2 -1
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/__init__.py +0 -2
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/consumer.py +2 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/decorate.py +61 -13
- {nebu-0.1.48 → nebu-0.1.51/src/nebu.egg-info}/PKG-INFO +1 -1
- {nebu-0.1.48 → nebu-0.1.51}/LICENSE +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/README.md +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/setup.cfg +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/auth.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/builders/builder.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/builders/models.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/cache.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/config.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/containers/container.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/containers/decorator.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/containers/models.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/containers/server.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/data.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/meta.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/default.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/models.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/processor.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/processors/remote.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/redis/models.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu/services/service.py +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu.egg-info/SOURCES.txt +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu.egg-info/dependency_links.txt +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu.egg-info/requires.txt +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/src/nebu.egg-info/top_level.txt +0 -0
- {nebu-0.1.48 → nebu-0.1.51}/tests/test_containers.py +0 -0
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "nebu"
-version = "0.1.48"
+version = "0.1.51"
 description = "A globally distributed container runtime"
 readme = "README.md"
 requires-python = ">=3.10.14"
@@ -18,6 +18,7 @@ dependencies = [
 
 [dependency-groups]
 dev = [
+    "chatmux>=0.1.4",
     "datamodel-code-generator>=0.28.5",
     "ipykernel>=6.29.5",
     "nbformat>=5.10.4",
src/nebu/processors/consumer.py

@@ -374,6 +374,7 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
         created_at = (
             datetime.fromisoformat(created_at_str)
             if created_at_str
+            and isinstance(created_at_str, str)  # Check type explicitly
             else datetime.now(timezone.utc)
         )
     except ValueError:
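For context, the added `isinstance` guard means only a non-empty string ever reaches `datetime.fromisoformat`; any other value (None, a number, an empty field) falls back to the current UTC time. Below is a minimal standalone sketch of that behaviour; `parse_created_at` is a hypothetical helper used here for illustration, not a function in consumer.py.

```python
from datetime import datetime, timezone
from typing import Any


def parse_created_at(created_at_str: Any) -> datetime:
    """Hypothetical helper mirroring the guarded parse in the hunk above."""
    try:
        created_at = (
            datetime.fromisoformat(created_at_str)
            if created_at_str and isinstance(created_at_str, str)
            else datetime.now(timezone.utc)
        )
    except ValueError:
        # Malformed ISO strings also fall back to "now", as in the surrounding handler.
        created_at = datetime.now(timezone.utc)
    return created_at


print(parse_created_at("2024-05-01T12:00:00+00:00"))  # parsed timestamp
print(parse_created_at(None))        # falls back to now()
print(parse_created_at(1714565000))  # non-string input also falls back
```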
@@ -651,6 +652,7 @@ consumer_name = f"consumer-{os.getpid()}-{socket.gethostname()}"  # More unique
 
 try:
     while True:
+        print("reading from stream...")
         try:
             # --- Check for Code Updates ---
             if entrypoint_abs_path:  # Should always be set after init
src/nebu/processors/decorate.py

@@ -24,6 +24,8 @@ import requests  # Add requests import
 from botocore.exceptions import ClientError  # Import ClientError
 from pydantic import BaseModel
 
+from nebu.auth import get_user_profile  # Import get_user_profile
+from nebu.config import GlobalConfig  # Add this import
 from nebu.containers.models import (
     V1AuthzConfig,
     V1ContainerHealthCheck,
@@ -60,8 +62,7 @@ CONTAINER_CODE_DIR = "/app/src"
 S3_CODE_PREFIX = "nebu-code"
 # Define the token endpoint URL (replace with actual URL)
 # Use environment variable for flexibility, provide a default for local dev
-NEBU_API_BASE_URL = os.environ.get("NEBU_API_BASE_URL", "http://localhost:
-S3_TOKEN_ENDPOINT = f"{NEBU_API_BASE_URL}/iam/s3-token"
+NEBU_API_BASE_URL = os.environ.get("NEBU_API_BASE_URL", "http://localhost:3000")
 
 # --- Jupyter Helper Functions ---
@@ -437,13 +438,57 @@ def processor(
             raise ValueError(
                 "Could not determine function directory or relative path for S3 upload."
             )
+        # --- Get API Key ---
+        print("[DEBUG Decorator] Loading Nebu configuration...")
+        try:
+            config = GlobalConfig.read()
+            current_server = config.get_current_server_config()
+            if not current_server or not current_server.api_key:
+                raise ValueError("Nebu server configuration or API key not found.")
+            api_key = current_server.api_key
+            print("[DEBUG Decorator] Nebu API key loaded successfully.")
+        except Exception as e:
+            print(f"ERROR: Failed to load Nebu configuration or API key: {e}")
+            raise RuntimeError(
+                f"Failed to load Nebu configuration or API key: {e}"
+            ) from e
+        # --- End Get API Key ---
+
+        # --- Determine Namespace ---
+        effective_namespace = namespace  # Start with the provided namespace
+        if effective_namespace is None:
+            print("[DEBUG Decorator] Namespace not provided, fetching user profile...")
+            try:
+                user_profile = get_user_profile(api_key)
+                if user_profile.handle:
+                    effective_namespace = user_profile.handle
+                    print(
+                        f"[DEBUG Decorator] Using user handle '{effective_namespace}' as namespace."
+                    )
+                else:
+                    raise ValueError("User profile does not contain a handle.")
+            except Exception as e:
+                print(
+                    f"ERROR: Failed to get user profile or handle for default namespace: {e}"
+                )
+                raise RuntimeError(
+                    f"Failed to get user profile or handle for default namespace: {e}"
+                ) from e
+        # --- End Determine Namespace ---
 
+        # Use processor_name instead of name
+        S3_TOKEN_ENDPOINT = f"{NEBU_API_BASE_URL}/v1/auth/temp-s3-tokens/{effective_namespace}/{processor_name}"
         print(f"[DEBUG Decorator] Fetching S3 token from: {S3_TOKEN_ENDPOINT}")
         try:
-
+            headers = {"Authorization": f"Bearer {api_key}"}  # Add headers here
+
+            # Try GET request instead of POST for this endpoint
+            response = requests.get(S3_TOKEN_ENDPOINT, headers=headers, timeout=10)
             response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
             s3_token_data = response.json()
 
+            print(f"[DEBUG Decorator] S3 token data: {s3_token_data}")
+
             aws_access_key_id = s3_token_data.get("access_key_id")
             aws_secret_access_key = s3_token_data.get("secret_access_key")
             aws_session_token = s3_token_data.get(
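Taken together, the block above changes how the decorator authenticates the code upload: the API key now comes from the local Nebu config, the namespace defaults to the authenticated user's handle, and the temporary S3 token is fetched with a GET request carrying a Bearer header. The sketch below condenses that flow using only the calls, endpoint path, and field names visible in this diff (`GlobalConfig.read`, `get_current_server_config`, `get_user_profile`, `/v1/auth/temp-s3-tokens/...`); the helper name `fetch_temp_s3_token` and its signature are assumptions, not part of the package's public API.

```python
import os

import requests

from nebu.auth import get_user_profile
from nebu.config import GlobalConfig

NEBU_API_BASE_URL = os.environ.get("NEBU_API_BASE_URL", "http://localhost:3000")


def fetch_temp_s3_token(processor_name: str, namespace: str | None = None) -> dict:
    # The API key comes from the currently selected server in the local Nebu config.
    config = GlobalConfig.read()
    server = config.get_current_server_config()
    if not server or not server.api_key:
        raise RuntimeError("Nebu server configuration or API key not found.")
    api_key = server.api_key

    # If no namespace was passed to the decorator, the user's handle is used instead.
    if namespace is None:
        namespace = get_user_profile(api_key).handle

    # The token endpoint is scoped to namespace and processor and queried via GET.
    url = f"{NEBU_API_BASE_URL}/v1/auth/temp-s3-tokens/{namespace}/{processor_name}"
    response = requests.get(url, headers={"Authorization": f"Bearer {api_key}"}, timeout=10)
    response.raise_for_status()
    return response.json()  # includes access_key_id, secret_access_key, s3_base_uri, ...
```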
@@ -456,8 +501,8 @@ def processor(
                     "Missing required fields (access_key_id, secret_access_key, s3_base_uri) in S3 token response."
                 )
 
-            # Construct unique S3 path: s3://<base_bucket>/<base_prefix>/<code_prefix>/<processor_name
-            unique_suffix = f"{
+            # Construct unique S3 path: s3://<base_bucket>/<base_prefix>/<code_prefix>/<namespace>/<processor_name>/
+            unique_suffix = f"{effective_namespace}/{processor_name}"
             parsed_base = urlparse(s3_base_uri)
             if not parsed_base.scheme == "s3" or not parsed_base.netloc:
                 raise ValueError(f"Invalid s3_base_uri received: {s3_base_uri}")
@@ -466,7 +511,8 @@ def processor(
             s3_dest_components = [S3_CODE_PREFIX, unique_suffix]
             if base_path:
                 # Handle potential multiple path segments in base_path
-
+                path_components = [comp for comp in base_path.split("/") if comp]
+                s3_dest_components = path_components + s3_dest_components
 
             # Filter out empty strings that might result from split
             s3_destination_key_components = [
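The two hunks above change the upload destination so that each processor's code lands under a namespace-scoped prefix. The worked example below shows the resulting key using the component ordering from the diff; the bucket, base path, namespace, and processor name are made-up values, and the final join step is an assumption because the code that assembles the key is not shown in this diff.

```python
from urllib.parse import urlparse

# Example values only; the real s3_base_uri comes from the token response.
s3_base_uri = "s3://example-bucket/tenant-prefix"
S3_CODE_PREFIX = "nebu-code"
effective_namespace = "my-handle"
processor_name = "my-processor"
base_path = "extra/prefix"  # optional caller-supplied prefix

unique_suffix = f"{effective_namespace}/{processor_name}"
s3_dest_components = [S3_CODE_PREFIX, unique_suffix]
if base_path:
    path_components = [comp for comp in base_path.split("/") if comp]
    s3_dest_components = path_components + s3_dest_components

parsed_base = urlparse(s3_base_uri)
key = "/".join(comp for comp in s3_dest_components if comp)  # assumed join step
print(f"s3://{parsed_base.netloc}{parsed_base.path.rstrip('/')}/{key}")
# s3://example-bucket/tenant-prefix/extra/prefix/nebu-code/my-handle/my-processor
```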
@@ -499,7 +545,7 @@ def processor(
             s3_bucket.sync(
                 source=func_dir,
                 destination=s3_destination_uri,
-                delete=
+                delete=True,
                 dry_run=False,
             )
             print("[DEBUG Decorator] S3 code upload completed.")
@@ -889,7 +935,7 @@ def processor(
         # --- Final Setup ---
         print("[DEBUG Decorator] Preparing final Processor object...")
         metadata = V1ResourceMetaRequest(
-            name=processor_name, namespace=
+            name=processor_name, namespace=effective_namespace, labels=labels
         )
         # Base command now just runs the consumer module, relies on PYTHONPATH finding code
         consumer_module = "nebu.processors.consumer"
@@ -900,10 +946,12 @@ def processor(
         consumer_execution_command = f"{python_cmd} -u -m {consumer_module}"
 
         # Setup commands: Base dependencies needed by consumer.py itself or the framework
-        #
-
-
-
+        # Install required dependencies for the consumer to run properly
+        base_deps_install = (
+            "pip install nebu redis PySocks pydantic dill boto3 requests"
+        )
+        setup_commands_list = [base_deps_install]
+
         if setup_script:
             print("[DEBUG Decorator] Adding user setup script to setup commands.")
             setup_commands_list.append(setup_script.strip())
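With this change, the consumer container always installs a baseline set of dependencies before any user-provided setup script runs. How the list is ultimately rendered into the container's command is not visible in this diff, so the newline join below is only one plausible rendering; the `setup_script` value is a hypothetical example.

```python
# Hypothetical user-provided setup script.
setup_script = "apt-get update && apt-get install -y git"

# Baseline dependencies added in this release, exactly as in the hunk above.
base_deps_install = "pip install nebu redis PySocks pydantic dill boto3 requests"
setup_commands_list = [base_deps_install]

if setup_script:
    setup_commands_list.append(setup_script.strip())

# Assumed rendering of the final setup command block.
print("\n".join(setup_commands_list))
```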
@@ -950,7 +998,7 @@ def processor(
 
         processor_instance = Processor(
             name=processor_name,
-            namespace=
+            namespace=effective_namespace,
             labels=labels,
             container=container_request,
             schema_=None,  # Schema info might be derived differently now if needed