nebu 0.1.0__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/containers/container.py CHANGED
@@ -220,6 +220,7 @@ class Container:
         self.queue = queue
         self.timeout = timeout
         self.ssh_keys = ssh_keys
+        self.status = self.container.status
 
     @classmethod
     def from_request(cls, request: V1ContainerRequest) -> V1Container:
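The single added line mirrors the resource status onto the client-side `Container` wrapper, so callers can read `container.status` instead of reaching through `container.container.status`. A minimal sketch of what that changes, with constructor values borrowed from the README example further down:

```python
from nebu import Container

# Illustrative constructor values, following the README example below.
container = Container(
    name="pytorch-example",
    namespace="test",
    image="pytorch/pytorch:latest",
    platform="runpod",
)

# New in 0.1.3: the status captured at construction is mirrored onto the
# wrapper, so this...
print(container.status)
# ...reads the same object as the longer pre-0.1.3 spelling:
print(container.container.status)
```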
nebu/containers/decorator.py ADDED
@@ -0,0 +1,174 @@
+import base64
+import inspect
+import pickle
+import time
+from typing import Any, Callable, List, Optional
+
+import requests
+
+from nebu.containers.container import Container
+from nebu.containers.models import V1ContainerRequest, V1EnvVar, V1ResourceMetaRequest
+
+
+def container(
+    image: str,
+    name: Optional[str] = None,
+    namespace: str = "default",
+    accelerators: Optional[List[str]] = None,
+    platform: str = "runpod",
+    python_cmd: str = "python",
+):
+    def decorator(func: Callable):
+        nonlocal name
+        if name is None:
+            name = func.__name__
+
+        def wrapper(*args: Any, **kwargs: Any):
+            nonlocal name
+            # Create your container with the server script
+            cont = Container(
+                name=name,  # type: ignore
+                namespace=namespace,
+                platform=platform,
+                image=image,
+                accelerators=accelerators,
+                # Command to start our function execution server
+                command=f"{python_cmd} -m nebu.containers.server",  # TODO: need to get the server code into the container
+                proxy_port=8080,
+            )
+
+            # Wait for container to be running
+            while (
+                cont.container.status
+                and cont.container.status.status
+                and cont.container.status.status.lower() != "running"
+            ):
+                print(
+                    f"Container '{cont.container.metadata.name}' not running yet; waiting..."
+                )
+                time.sleep(1)
+
+            # Get function source code
+            func_code = inspect.getsource(func)
+
+            # Serialize arguments using pickle for complex objects
+            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
+            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
+
+            # Prepare payload
+            payload = {
+                "function_code": func_code,
+                "args": serialized_args,
+                "kwargs": serialized_kwargs,
+            }
+
+            # Get container URL
+            container_url = (
+                cont.status.tailnet_url
+                if cont.status and hasattr(cont.status, "tailnet_url")
+                else "http://localhost:8080"
+            )
+
+            # Send to container and get result
+            response = requests.post(f"{container_url}/execute", json=payload)
+
+            if response.status_code != 200:
+                raise RuntimeError(f"Function execution failed: {response.text}")
+
+            # Deserialize the result
+            serialized_result = response.json()["result"]
+            result = pickle.loads(base64.b64decode(serialized_result))
+
+            return result
+
+        return wrapper
+
+    return decorator
+
+
+def on_feedback(
+    human: Human,
+    accelerators: Optional[List[str]] = None,
+    platform: str = "runpod",
+    python_cmd: str = "python",
+    timeout: Optional[str] = None,
+    env: Optional[List[V1EnvVar]] = None,
+):
+    def decorator(func: Callable):
+        nonlocal name
+        if name is None:
+            name = func.__name__
+
+        # Get function source code
+        func_code = inspect.getsource(func)
+
+        command = """
+
+        """
+
+        # Create the container request
+        container_request = V1ContainerRequest(
+            kind="Container",
+            platform=platform,
+            metadata=V1ResourceMetaRequest(
+                name=name,
+                namespace=namespace,
+            ),
+            image=image,
+            env=env,
+            command=f"{python_cmd} -m nebu.containers.server",
+            accelerators=accelerators,
+            timeout=timeout,
+            proxy_port=8080,
+            restart="Never",  # Jobs should not restart
+        )
+
+        def run(*args: Any, **kwargs: Any):
+            # Create a container from the request
+            cont = Container.from_request(container_request)
+
+            # Wait for container to be running
+            while (
+                cont.status
+                and cont.status.status
+                and cont.status.status.lower() != "running"
+            ):
+                print(f"Job '{cont.metadata.name}' not running yet; waiting...")
+                time.sleep(1)
+
+            # Serialize arguments using pickle for complex objects
+            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
+            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
+
+            # Prepare payload
+            payload = {
+                "function_code": func_code,
+                "args": serialized_args,
+                "kwargs": serialized_kwargs,
+            }
+
+            # Get container URL
+            container_url = (
+                cont.status.tailnet_url
+                if cont.status and hasattr(cont.status, "tailnet_url")
+                else "http://localhost:8080"
+            )
+
+            # Send to container and get result
+            response = requests.post(f"{container_url}/execute", json=payload)
+
+            if response.status_code != 200:
+                raise RuntimeError(f"Function execution failed: {response.text}")
+
+            # Deserialize the result
+            serialized_result = response.json()["result"]
+            result = pickle.loads(base64.b64decode(serialized_result))
+
+            return result
+
+        # Attach the run method to the container request
+        container_request.run = run  # type: ignore
+
+        return container_request
+
+    return decorator
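The `container` decorator added above is used like the "[in progress]" example this release adds to the README (see the METADATA diff below). A usage sketch, with the image and accelerator strings as illustrative values:

```python
from nebu import container


# Decorating a function defers its execution to a remote container.
@container(image="python:3.10-slim", accelerators=["1:A100_SXM"])
def add(x: int, y: int) -> int:
    return x + y


# Calling the wrapper creates the container, waits for a "running" status,
# POSTs the function source plus base64-pickled arguments to /execute, and
# unpickles the returned result.
result = add(1, 2)
print(result)  # 3, computed inside the container
```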
nebu/containers/models.py CHANGED
@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Dict, List, Optional
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
 
 from nebu.meta import V1ResourceMeta, V1ResourceMetaRequest
 
@@ -59,17 +59,21 @@ class V1VolumePath(BaseModel):
     continuous: bool = False
     driver: V1VolumeDriver = V1VolumeDriver.RCLONE_SYNC
 
+    model_config = ConfigDict(use_enum_values=True)
+
 
 class V1VolumeConfig(BaseModel):
     paths: List[V1VolumePath]
     cache_dir: str = "/nebu/cache"
 
+    model_config = ConfigDict(use_enum_values=True)
+
 
 class V1ContainerStatus(BaseModel):
     status: Optional[str] = None
     message: Optional[str] = None
     accelerator: Optional[str] = None
-    public_ip: Optional[str] = None
+    tailnet_url: Optional[str] = None
     cost_per_hr: Optional[float] = None
 
 
@@ -112,6 +116,16 @@ class V1AuthzConfig(BaseModel):
     rules: Optional[List[V1AuthzRule]] = None
 
 
+class V1ContainerHealthCheck(BaseModel):
+    interval: Optional[str] = None
+    timeout: Optional[str] = None
+    retries: Optional[int] = None
+    start_period: Optional[str] = None
+    path: Optional[str] = None
+    port: Optional[int] = None
+    protocol: Optional[str] = None
+
+
 class V1PortRequest(BaseModel):
     port: int
     protocol: Optional[str] = None
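The new `V1ContainerHealthCheck` model is wired into the request, container, and update models via an optional `health_check` field in the hunks below. A construction sketch; the values are illustrative and every field defaults to None:

```python
from nebu.containers.models import V1ContainerHealthCheck

# Illustrative values; every field on the new model is Optional.
health = V1ContainerHealthCheck(
    interval="30s",
    timeout="5s",
    retries=3,
    start_period="10s",
    path="/healthz",
    port=8080,
    protocol="http",
)

# The model can then be passed as the new optional `health_check` field on
# V1ContainerRequest, V1Container, or V1UpdateContainer (see the hunks below).
print(health.model_dump(exclude_none=True))
```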
@@ -142,6 +156,9 @@ class V1ContainerRequest(BaseModel):
     ports: Optional[List[V1PortRequest]] = None
     proxy_port: Optional[int] = None
     authz: Optional[V1AuthzConfig] = None
+    health_check: Optional[V1ContainerHealthCheck] = None
+
+    model_config = ConfigDict(use_enum_values=True)
 
 
 class V1Container(BaseModel):
@@ -163,6 +180,9 @@ class V1Container(BaseModel):
     ports: Optional[List[V1Port]] = None
     proxy_port: Optional[int] = None
     authz: Optional[V1AuthzConfig] = None
+    health_check: Optional[V1ContainerHealthCheck] = None
+
+    model_config = ConfigDict(use_enum_values=True)
 
 
 class V1UpdateContainer(BaseModel):
@@ -181,7 +201,14 @@ class V1UpdateContainer(BaseModel):
     timeout: Optional[str] = None
     resources: Optional[V1ContainerResources] = None
     proxy_port: Optional[int] = None
+    authz: Optional[V1AuthzConfig] = None
+    health_check: Optional[V1ContainerHealthCheck] = None
+    no_delete: Optional[bool] = None
+
+    model_config = ConfigDict(use_enum_values=True)
 
 
 class V1Containers(BaseModel):
     containers: List[V1Container]
+
+    model_config = ConfigDict(use_enum_values=True)
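Several models now set `model_config = ConfigDict(use_enum_values=True)`. In pydantic v2 this stores validated enum fields as their underlying values rather than as enum members, which keeps `model_dump()` output JSON-friendly. A generic sketch of the behaviour (not nebu code; the enum here is a stand-in):

```python
from enum import Enum

from pydantic import BaseModel, ConfigDict


class Driver(str, Enum):
    RCLONE_SYNC = "RCLONE_SYNC"


class VolumePathLike(BaseModel):
    # Same setting the nebu models adopt in 0.1.3.
    model_config = ConfigDict(use_enum_values=True)

    driver: Driver


path = VolumePathLike(driver=Driver.RCLONE_SYNC)
print(path.driver)        # 'RCLONE_SYNC' -- stored as the plain value
print(path.model_dump())  # {'driver': 'RCLONE_SYNC'}
```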
nebu/containers/server.py ADDED
@@ -0,0 +1,70 @@
+import base64
+import json
+import pickle
+from http.server import BaseHTTPRequestHandler, HTTPServer
+
+
+class FunctionExecutionHandler(BaseHTTPRequestHandler):
+    def do_POST(self):
+        content_length = int(self.headers["Content-Length"])
+        post_data = self.rfile.read(content_length)
+        payload = json.loads(post_data.decode("utf-8"))
+
+        if self.path == "/execute":
+            try:
+                # Extract function code, args and kwargs
+                func_code = payload["function_code"]
+                serialized_args = payload["args"]
+                serialized_kwargs = payload["kwargs"]
+
+                # Deserialize arguments
+                args = pickle.loads(base64.b64decode(serialized_args))
+                kwargs = pickle.loads(base64.b64decode(serialized_kwargs))
+
+                # Create a local namespace and execute the function
+                local_namespace = {}
+                exec(func_code, globals(), local_namespace)
+
+                # Find the function object in the local namespace
+                func_name = None
+                for name, obj in local_namespace.items():
+                    if callable(obj) and not name.startswith("__"):
+                        func_name = name
+                        break
+
+                if not func_name:
+                    raise ValueError("No function found in the provided code")
+
+                # Execute the function
+                result = local_namespace[func_name](*args, **kwargs)
+
+                # Serialize the result
+                serialized_result = base64.b64encode(pickle.dumps(result)).decode(
+                    "utf-8"
+                )
+
+                # Send response
+                self.send_response(200)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                self.wfile.write(json.dumps({"result": serialized_result}).encode())
+
+            except Exception as e:
+                self.send_response(500)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                self.wfile.write(json.dumps({"error": str(e)}).encode())
+        else:
+            self.send_response(404)
+            self.end_headers()
+
+
+def run_server(port: int = 8080) -> None:
+    server_address = ("", port)
+    httpd = HTTPServer(server_address, FunctionExecutionHandler)
+    print(f"Starting server on port {port}")
+    httpd.serve_forever()
+
+
+if __name__ == "__main__":
+    run_server()
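The decorator module shown earlier is the intended client for this endpoint. For reference, a hand-rolled sketch of the same exchange, assuming the server above is running on localhost:8080; note that the handler `exec`s and unpickles whatever it receives, so it should only ever be reachable over a trusted network:

```python
import base64
import pickle
import textwrap

import requests

SERVER_URL = "http://localhost:8080"  # assumption: server.py running locally

# Source for a single function; the handler executes the first callable it finds.
func_code = textwrap.dedent(
    """
    def add(x, y):
        return x + y
    """
)

payload = {
    "function_code": func_code,
    "args": base64.b64encode(pickle.dumps((1, 2))).decode("utf-8"),
    "kwargs": base64.b64encode(pickle.dumps({})).decode("utf-8"),
}

response = requests.post(f"{SERVER_URL}/execute", json=payload)
response.raise_for_status()

result = pickle.loads(base64.b64decode(response.json()["result"]))
print(result)  # 3
```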
nebu-0.1.0.dist-info/METADATA → nebu-0.1.3.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.0
+Version: 0.1.3
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
@@ -12,7 +12,8 @@ Requires-Dist: requests>=2.32.3
 Dynamic: license-file
 
 # nebulous-py
-A python library for the [Nebulous runtime](https://github.com/agentsea/nebulous)
+
+A declarative python library for the [Nebulous runtime](https://github.com/agentsea/nebulous)
 
 ## Installation
 
@@ -22,14 +23,14 @@ pip install nebu
 
 ## Usage
 
+Create a pytorch container on runpod with 1 A100 GPU
+
 ```python
-from nebu import Container, V1EnvVar, V1ResourceMeta
+from nebu import Container, V1EnvVar
 
 container = Container(
-    metadata=V1ResourceMeta(
-        name="pytorch-example",
-        namespace="test",
-    ),
+    name="pytorch-example",
+    namespace="test",
     image="pytorch/pytorch:latest",
     platform="runpod",
     env=[V1EnvVar(name="MY_ENV_VAR", value="my-value")],
@@ -47,6 +48,21 @@ print(f"Container '{container.metadata.name}' is running")
 print(f"You can access the container at {container.status.tailnet_url}")
 ```
 
+### Decorator
+
+Run a python function in a container [in progress]
+
+```python
+from nebu import container
+
+@container(image="python:3.10-slim", accelerators=["1:A100_SXM"])
+def my_function(x: int, y: int) -> int:
+    return x + y
+
+result = my_function(1, 2)
+print(result)
+```
+
 ## Contributing
 
 Please open an issue or a PR to contribute to the project.
nebu-0.1.3.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+nebu/__init__.py,sha256=EbdC8ZKnRTt6jkX0WN0p1pnaDEzb2InqZ1r8QZWzph0,195
+nebu/config.py,sha256=XBY7uKgcJX9d1HGxqqpx87o_9DuF3maUlUnKkcpUrKU,4565
+nebu/meta.py,sha256=AnvrtP0mc7a-YP4zVhErHPsU0FSmwMejYgKWnV8wqqE,566
+nebu/containers/container.py,sha256=WCXBadAHhvkFQdpDjHu7nh6NjLlMxRsJbMbLXH8zaCs,9283
+nebu/containers/decorator.py,sha256=sZQ4ZwqQk_jLDJ-n9P6N0MlxvWL9Ac7eCPIHVxYq58c,5576
+nebu/containers/models.py,sha256=o4jHBiXpcnxdxvUSR4q38ctHdL0w6M0uRj4DD-5B0o8,5932
+nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
+nebu/processors/models.py,sha256=6XSw4iM77XYJf6utm8QReN9fyMS0dK40a5sVwsC7RRA,1970
+nebu/processors/processor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
+nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nebu-0.1.3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.3.dist-info/METADATA,sha256=QAQ6mgaNa41T-MxXPykyYFibeki6AUZw1kNPoWQHPHo,1587
+nebu-0.1.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nebu-0.1.3.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.3.dist-info/RECORD,,
nebu-0.1.0.dist-info/RECORD REMOVED
@@ -1,14 +0,0 @@
-nebu/__init__.py,sha256=EbdC8ZKnRTt6jkX0WN0p1pnaDEzb2InqZ1r8QZWzph0,195
-nebu/config.py,sha256=XBY7uKgcJX9d1HGxqqpx87o_9DuF3maUlUnKkcpUrKU,4565
-nebu/meta.py,sha256=AnvrtP0mc7a-YP4zVhErHPsU0FSmwMejYgKWnV8wqqE,566
-nebu/containers/container.py,sha256=cE8BChcsHXAtpvaP7w62mMQmHq8y7U-ssKtxS-kQ0CQ,9239
-nebu/containers/models.py,sha256=yJerkN7V03s_V5Yr3WdMghzlj3kLpawousGy5UElxJ4,5065
-nebu/processors/models.py,sha256=6XSw4iM77XYJf6utm8QReN9fyMS0dK40a5sVwsC7RRA,1970
-nebu/processors/processor.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
-nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.0.dist-info/METADATA,sha256=noow9VAkFgyhZIRUZJcaZUtB1wULdyU1Inmm5nuMPM4,1305
-nebu-0.1.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-nebu-0.1.0.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.0.dist-info/RECORD,,