nebu 0.1.6__tar.gz → 0.1.7__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.6
+Version: 0.1.7
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
 [project]
 name = "nebu"
-version = "0.1.6"
+version = "0.1.7"
 description = "A globally distributed container runtime"
 readme = "README.md"
 requires-python = ">=3.10.14"
@@ -26,7 +26,7 @@ class Container:
         self,
         name: str,
         image: str,
-        namespace: str = "default",
+        namespace: Optional[str] = None,
         platform: Optional[str] = None,
         env: Optional[List[V1EnvVar]] = None,
         command: Optional[str] = None,
@@ -208,8 +208,8 @@ class Container:
 
         # Save constructor params to `self` for reference, like you do in ReplayBuffer.
         self.kind = "Container"
-        self.namespace = namespace
-        self.name = name
+        self.namespace = self.container.metadata.namespace
+        self.name = self.container.metadata.name
         self.platform = platform
         self.metadata = meta_request
         self.image = image
@@ -290,7 +290,7 @@ class Container:
     def load(
         cls,
        name: str,
-        namespace: str = "default",
+        namespace: Optional[str] = None,
        config: Optional[GlobalConfig] = None,
    ):
        """
@@ -0,0 +1,85 @@
+import base64
+import inspect
+import pickle
+import time
+from typing import Any, Callable, List, Optional
+
+import requests
+
+from nebu.containers.container import Container
+
+
+def container(
+    image: str,
+    name: Optional[str] = None,
+    namespace: Optional[str] = None,
+    accelerators: Optional[List[str]] = None,
+    platform: str = "runpod",
+    python_cmd: str = "python",
+):
+    def decorator(func: Callable):
+        nonlocal name
+        if name is None:
+            name = func.__name__
+
+        def wrapper(*args: Any, **kwargs: Any):
+            nonlocal name
+            # Create your container with the server script
+            cont = Container(
+                name=name,  # type: ignore
+                namespace=namespace,
+                platform=platform,
+                image=image,
+                accelerators=accelerators,
+                # Command to start our function execution server
+                command=f"{python_cmd} -m nebu.containers.server",  # TODO: need to get the server code into the container
+                proxy_port=8080,
+            )
+
+            # Wait for container to be running
+            while (
+                cont.container.status
+                and cont.container.status.status
+                and cont.container.status.status.lower() != "running"
+            ):
+                print(
+                    f"Container '{cont.container.metadata.name}' not running yet; waiting..."
+                )
+                time.sleep(1)
+
+            # Get function source code
+            func_code = inspect.getsource(func)
+
+            # Serialize arguments using pickle for complex objects
+            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
+            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
+
+            # Prepare payload
+            payload = {
+                "function_code": func_code,
+                "args": serialized_args,
+                "kwargs": serialized_kwargs,
+            }
+
+            # Get container URL
+            container_url = (
+                cont.status.tailnet_url
+                if cont.status and hasattr(cont.status, "tailnet_url")
+                else "http://localhost:8080"
+            )
+
+            # Send to container and get result
+            response = requests.post(f"{container_url}/execute", json=payload)
+
+            if response.status_code != 200:
+                raise RuntimeError(f"Function execution failed: {response.text}")
+
+            # Deserialize the result
+            serialized_result = response.json()["result"]
+            result = pickle.loads(base64.b64decode(serialized_result))
+
+            return result
+
+        return wrapper
+
+    return decorator
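
The new file above, added in 0.1.7, defines a @container decorator: it provisions a Container, waits for it to report a running status, ships the decorated function's source plus base64-encoded pickled arguments to the container's /execute endpoint, and unpickles the "result" field of the JSON response. Below is a hypothetical usage example grounded only in that added code; the import path, image name, and accelerator string are placeholders, and it assumes the in-container server launched by "{python_cmd} -m nebu.containers.server" is reachable.

# Hypothetical example; the module path is assumed and not named in this diff.
from nebu.containers.decorator import container

@container(image="my-org/my-image:latest", accelerators=["A100"])  # placeholder image/accelerator
def add(a: int, b: int) -> int:
    return a + b

# Calling the wrapped function creates the container, POSTs the payload
# {"function_code": ..., "args": ..., "kwargs": ...} to <container_url>/execute,
# and returns the unpickled result.
print(add(1, 2))

Arguments and return values travel as base64-encoded pickles, so they must be picklable on both ends; and because only the function's own source is captured with inspect.getsource, anything it references must already be available inside the container.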
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.6
+Version: 0.1.7
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
@@ -1,174 +0,0 @@
-import base64
-import inspect
-import pickle
-import time
-from typing import Any, Callable, List, Optional
-
-import requests
-
-from nebu.containers.container import Container
-from nebu.containers.models import V1ContainerRequest, V1EnvVar, V1ResourceMetaRequest
-
-
-def container(
-    image: str,
-    name: Optional[str] = None,
-    namespace: str = "default",
-    accelerators: Optional[List[str]] = None,
-    platform: str = "runpod",
-    python_cmd: str = "python",
-):
-    def decorator(func: Callable):
-        nonlocal name
-        if name is None:
-            name = func.__name__
-
-        def wrapper(*args: Any, **kwargs: Any):
-            nonlocal name
-            # Create your container with the server script
-            cont = Container(
-                name=name,  # type: ignore
-                namespace=namespace,
-                platform=platform,
-                image=image,
-                accelerators=accelerators,
-                # Command to start our function execution server
-                command=f"{python_cmd} -m nebu.containers.server",  # TODO: need to get the server code into the container
-                proxy_port=8080,
-            )
-
-            # Wait for container to be running
-            while (
-                cont.container.status
-                and cont.container.status.status
-                and cont.container.status.status.lower() != "running"
-            ):
-                print(
-                    f"Container '{cont.container.metadata.name}' not running yet; waiting..."
-                )
-                time.sleep(1)
-
-            # Get function source code
-            func_code = inspect.getsource(func)
-
-            # Serialize arguments using pickle for complex objects
-            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
-            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
-
-            # Prepare payload
-            payload = {
-                "function_code": func_code,
-                "args": serialized_args,
-                "kwargs": serialized_kwargs,
-            }
-
-            # Get container URL
-            container_url = (
-                cont.status.tailnet_url
-                if cont.status and hasattr(cont.status, "tailnet_url")
-                else "http://localhost:8080"
-            )
-
-            # Send to container and get result
-            response = requests.post(f"{container_url}/execute", json=payload)
-
-            if response.status_code != 200:
-                raise RuntimeError(f"Function execution failed: {response.text}")
-
-            # Deserialize the result
-            serialized_result = response.json()["result"]
-            result = pickle.loads(base64.b64decode(serialized_result))
-
-            return result
-
-        return wrapper
-
-    return decorator
-
-
-def on_feedback(
-    human: Human,
-    accelerators: Optional[List[str]] = None,
-    platform: str = "runpod",
-    python_cmd: str = "python",
-    timeout: Optional[str] = None,
-    env: Optional[List[V1EnvVar]] = None,
-):
-    def decorator(func: Callable):
-        nonlocal name
-        if name is None:
-            name = func.__name__
-
-        # Get function source code
-        func_code = inspect.getsource(func)
-
-        command = """
-
-        """
-
-        # Create the container request
-        container_request = V1ContainerRequest(
-            kind="Container",
-            platform=platform,
-            metadata=V1ResourceMetaRequest(
-                name=name,
-                namespace=namespace,
-            ),
-            image=image,
-            env=env,
-            command=f"{python_cmd} -m nebu.containers.server",
-            accelerators=accelerators,
-            timeout=timeout,
-            proxy_port=8080,
-            restart="Never",  # Jobs should not restart
-        )
-
-        def run(*args: Any, **kwargs: Any):
-            # Create a container from the request
-            cont = Container.from_request(container_request)
-
-            # Wait for container to be running
-            while (
-                cont.status
-                and cont.status.status
-                and cont.status.status.lower() != "running"
-            ):
-                print(f"Job '{cont.metadata.name}' not running yet; waiting...")
-                time.sleep(1)
-
-            # Serialize arguments using pickle for complex objects
-            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
-            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
-
-            # Prepare payload
-            payload = {
-                "function_code": func_code,
-                "args": serialized_args,
-                "kwargs": serialized_kwargs,
-            }
-
-            # Get container URL
-            container_url = (
-                cont.status.tailnet_url
-                if cont.status and hasattr(cont.status, "tailnet_url")
-                else "http://localhost:8080"
-            )
-
-            # Send to container and get result
-            response = requests.post(f"{container_url}/execute", json=payload)
-
-            if response.status_code != 200:
-                raise RuntimeError(f"Function execution failed: {response.text}")
-
-            # Deserialize the result
-            serialized_result = response.json()["result"]
-            result = pickle.loads(base64.b64decode(serialized_result))
-
-            return result
-
-        # Attach the run method to the container request
-        container_request.run = run  # type: ignore
-
-        return container_request
-
-    return decorator
13 files without changes