nebu 0.1.91__py3-none-any.whl → 0.1.93__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nebu/cache.py +15 -11
- nebu/containers/container.py +13 -10
- nebu/data.py +112 -92
- nebu/logging.py +33 -0
- nebu/namespaces/namespace.py +7 -4
- nebu/processors/consumer.py +184 -152
- nebu/processors/consumer_process_worker.py +179 -96
- nebu/processors/decorate.py +226 -223
- nebu/processors/processor.py +38 -28
- {nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/METADATA +2 -1
- nebu-0.1.93.dist-info/RECORD +28 -0
- nebu/containers/decorator.py +0 -93
- nebu/containers/server.py +0 -70
- nebu/processors/remote.py +0 -47
- nebu-0.1.91.dist-info/RECORD +0 -30
- {nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/WHEEL +0 -0
- {nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/licenses/LICENSE +0 -0
- {nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/top_level.txt +0 -0
nebu/processors/processor.py
CHANGED
@@ -6,6 +6,7 @@ import requests
 from pydantic import BaseModel
 
 from nebu.config import GlobalConfig
+from nebu.logging import logger
 from nebu.meta import V1ResourceMetaRequest, V1ResourceReference
 from nebu.processors.models import (
     V1ContainerRequest,
@@ -26,11 +27,13 @@ def _fetch_and_print_logs(log_url: str, api_key: str, processor_name: str):
     """Helper function to fetch logs in a separate thread."""
     try:
         headers = {"Authorization": f"Bearer {api_key}"}
-
+        logger.info(
+            f"--- Attempting to stream logs for {processor_name} from {log_url} ---"
+        )
         # Use stream=True for potentially long-lived connections and timeout
         with requests.get(log_url, headers=headers, stream=True, timeout=300) as r:
             r.raise_for_status()
-
+            logger.info(f"--- Streaming logs for {processor_name} ---")
             for line in r.iter_lines():
                 if not line:
                     continue
@@ -45,37 +48,42 @@ def _fetch_and_print_logs(log_url: str, api_key: str, processor_name: str):
                         for container, log_content in log_data.items():
                             # Ensure log_content is a string before printing
                             if isinstance(log_content, str):
-
+                                logger.info(
+                                    f"[{processor_name}][{container}] {log_content}"
+                                )
                             else:
                                 # Handle cases where log_content might not be a string
-
+                                logger.warning(
                                     f"[{processor_name}][{container}] Unexpected log format: {log_content}"
                                 )
                     else:
                         # If not a dict, print the raw line with a warning
-
+                        logger.warning(
                             f"[{processor_name}] Unexpected log structure (not a dict): {decoded_line}"
                         )
 
                 except json.JSONDecodeError:
                     # If JSON parsing fails, print the original line as fallback
-
+                    logger.warning(
+                        f"[{processor_name}] {line.decode('utf-8')} (raw/non-JSON)"
+                    )
                 except Exception as e:
                     # Catch other potential errors during line processing
-
+                    logger.error(f"Error processing log line for {processor_name}: {e}")
 
-
+        logger.info(f"--- Log stream ended for {processor_name} ---")
     except requests.exceptions.Timeout:
-
+        logger.warning(f"Log stream connection timed out for {processor_name}.")
     except requests.exceptions.RequestException as e:
         # Handle potential API errors gracefully
-
+        logger.error(f"Error fetching logs for {processor_name} from {log_url}: {e}")
        if e.response is not None:
-
+            # Log response details at a debug level or keep as error if critical
+            logger.error(
                 f"Response status: {e.response.status_code}, Response body: {e.response.text}"
             )
     except Exception as e:
-
+        logger.error(
             f"An unexpected error occurred while fetching logs for {processor_name}: {e}"
         )
 
@@ -130,10 +138,10 @@ class Processor(Generic[InputType, OutputType]):
         if not namespace:
             namespace = "-"
 
-
+        logger.info(f"Using namespace: {namespace}")
 
         existing_processors = V1Processors.model_validate(response.json())
-
+        logger.debug(f"Existing processors: {existing_processors}")
         self.processor: Optional[V1Processor] = next(
             (
                 processor_val
@@ -143,11 +151,11 @@ class Processor(Generic[InputType, OutputType]):
             ),
             None,
         )
-
+        logger.debug(f"Processor: {self.processor}")
 
        # If not found, create
        if not self.processor:
-
+            logger.info("Creating processor")
            # Create metadata and processor request
            metadata = V1ResourceMetaRequest(
                name=name, namespace=namespace, labels=labels
@@ -163,8 +171,8 @@ class Processor(Generic[InputType, OutputType]):
                scale=scale_config,
            )
 
-
-
+            logger.debug("Request:")
+            logger.debug(processor_request.model_dump(exclude_none=True))
            create_response = requests.post(
                self.processors_url,
                json=processor_request.model_dump(exclude_none=True),
@@ -172,10 +180,10 @@ class Processor(Generic[InputType, OutputType]):
            )
            create_response.raise_for_status()
            self.processor = V1Processor.model_validate(create_response.json())
-
+            logger.info(f"Created Processor {self.processor.metadata.name}")
        else:
            # Else, update
-
+            logger.info(
                f"Found Processor {self.processor.metadata.name}, updating if necessary"
            )
 
@@ -189,8 +197,8 @@ class Processor(Generic[InputType, OutputType]):
                no_delete=no_delete,
            )
 
-
-
+            logger.debug("Update request:")
+            logger.debug(update_processor.model_dump(exclude_none=True))
            patch_response = requests.patch(
                f"{self.processors_url}/{self.processor.metadata.namespace}/{self.processor.metadata.name}",
                json=update_processor.model_dump(exclude_none=True),
@@ -198,7 +206,7 @@ class Processor(Generic[InputType, OutputType]):
            )
            patch_response.raise_for_status()
            self.processor = V1Processor.model_validate(patch_response.json())
-
+            logger.info(f"Updated Processor {self.processor.metadata.name}")
 
    def __call__(
        self,
@@ -264,14 +272,16 @@ class Processor(Generic[InputType, OutputType]):
            )
            try:
                self._log_thread.start()
-
+                logger.info(
+                    f"Started background log fetching for {processor_name}..."
+                )
            except Exception as e:
-
+                logger.error(
                    f"Failed to start log fetching thread for {processor_name}: {e}"
                )
                self._log_thread = None  # Reset if start fails
        else:
-
+            logger.info(f"Log fetching is already running for {processor_name}.")
 
        return send_response_json
 
@@ -411,9 +421,9 @@ class Processor(Generic[InputType, OutputType]):
            # Setting the reference to None allows a new thread to be created if needed.
            # The OS will eventually clean up the daemon thread when the main process exits,
            # or potentially sooner if the network request completes or errors out.
-
+            logger.info(
                f"Disassociating from active log stream for {self.name}. A new stream can be started."
            )
            self._log_thread = None
        else:
-
+            logger.info(f"No active log stream to stop for {self.name}.")
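The hunks above replace the module's print-based diagnostics with a shared `logger` imported from the new `nebu/logging.py` module (added in this release alongside the `loguru>=0.7.3` dependency). The contents of `nebu/logging.py` are not shown in this diff; the following is a minimal sketch, under the assumption that it simply re-exports a configured loguru logger and reads its level from an environment variable (the variable name here is hypothetical, not taken from the package).

```python
# Hypothetical sketch of nebu/logging.py (not shown in this diff): a loguru-based
# module exposing a single shared `logger`, as imported by nebu/processors/processor.py.
import os
import sys

from loguru import logger

# Assumed environment variable for controlling verbosity; the real module may
# configure loguru differently.
LOG_LEVEL = os.environ.get("NEBU_LOG_LEVEL", "INFO").upper()

# Replace loguru's default sink so the chosen level is applied consistently.
logger.remove()
logger.add(sys.stderr, level=LOG_LEVEL)

__all__ = ["logger"]
```

Callers then pick up the shared instance exactly as the diff shows: `from nebu.logging import logger`.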
{nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/METADATA
CHANGED
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.91
+Version: 0.1.93
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: boto3>=1.37.30
 Requires-Dist: dill>=0.3.8
+Requires-Dist: loguru>=0.7.3
 Requires-Dist: openai>=1.68.2
 Requires-Dist: pillow>=10.4.0
 Requires-Dist: pydantic>=2.10.6
nebu-0.1.93.dist-info/RECORD
ADDED
@@ -0,0 +1,28 @@
+nebu/__init__.py,sha256=xNtWiN29MJZK_WBEUP-9hDmlkfLxoASVI-f4tNTXO58,454
+nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
+nebu/cache.py,sha256=JqRb4FdZrRrO4ePlwvsKdxRC8dNEFMxfTWag0aJz8Gw,4893
+nebu/config.py,sha256=C5Jt9Bd0i0HrgzBSVNJ-Ml3KwX_gaYbYYZEtNL2gvJg,7031
+nebu/data.py,sha256=randOsFQxOQdw7Yhg7mb6PvH5Efjfc-KpvqdBdpWTEE,40923
+nebu/errors.py,sha256=bBnK5YQ6qZg4OMY81AN2k03ppefg89FUwF_SHEMlqCA,170
+nebu/logging.py,sha256=VzpjCEoXm3c4i0sKJL5GTsPIhTQ6Y4BPUTzPmwhve7o,950
+nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
+nebu/orign.py,sha256=SkVfHgpadwik58KCZCrjdV5EHY0dhpEhDvijzLxY11Y,2052
+nebu/builders/builder.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nebu/builders/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nebu/containers/container.py,sha256=Mrh_gvMsTvDkj3CwpqIPzJ72IMw0gQIg64y548vq0yg,13803
+nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
+nebu/namespaces/models.py,sha256=EqUOpzhVBhvJw2P92ONDUbIgC31M9jMmcaG5vyOrsWg,497
+nebu/namespaces/namespace.py,sha256=oeZyGqsIGIrppyjif1ZONsdTmqRgd9oSLFE1BChXTTE,5247
+nebu/processors/consumer.py,sha256=j6iKF_wc8RUNKrFqjB5keUX-Gj9hGZUbmAjEyTm-Oj0,55367
+nebu/processors/consumer_process_worker.py,sha256=h--eNFKaLbUayxn88mB8oGGdrU2liE1dnwm_TPlewX8,36960
+nebu/processors/decorate.py,sha256=pGA0m8cxTnEu7ullKTaAgjf_FFP5RhRqBnQnsdSwtJs,55476
+nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
+nebu/processors/models.py,sha256=FnBJFxtaJkp-uIOs90qkJUBvOR80l2cdGnfmOIWIvVA,4058
+nebu/processors/processor.py,sha256=dCQ9si_P03Lw_dLQESE_TUD6ZGGTYZce28GmiyWm1Hg,16525
+nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
+nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nebu-0.1.93.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.93.dist-info/METADATA,sha256=3MTAfHaYqznKvUqtvvB_CBslT2Xnh8j1TLWz05ec8PM,1760
+nebu-0.1.93.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
+nebu-0.1.93.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.93.dist-info/RECORD,,
nebu/containers/decorator.py
DELETED
@@ -1,93 +0,0 @@
-import base64
-import pickle
-import time
-from typing import Any, Callable, List, Optional
-
-import dill  # Import dill
-import requests
-
-from nebu.containers.container import Container
-
-
-def container(
-    image: str,
-    name: Optional[str] = None,
-    namespace: Optional[str] = None,
-    accelerators: Optional[List[str]] = None,
-    platform: str = "runpod",
-    python_cmd: str = "python",
-):
-    def decorator(func: Callable):
-        nonlocal name
-        if name is None:
-            name = func.__name__
-
-        def wrapper(*args: Any, **kwargs: Any):
-            nonlocal name
-            # Create your container with the server script
-            cont = Container(
-                name=name,  # type: ignore
-                namespace=namespace,
-                platform=platform,
-                image=image,
-                accelerators=accelerators,
-                # Command to start our function execution server
-                command=f"{python_cmd} -m nebu.containers.server",  # TODO: need to get the server code into the container
-                proxy_port=8080,
-            )
-
-            # Wait for container to be running
-            while (
-                cont.container.status
-                and cont.container.status.status
-                and cont.container.status.status.lower() != "running"
-            ):
-                print(
-                    f"Container '{cont.container.metadata.name}' not running yet; waiting..."
-                )
-                time.sleep(1)
-
-            # Get function source code using dill
-            try:
-                func_code = dill.source.getsource(func)
-            except (OSError, TypeError) as e:
-                raise RuntimeError(
-                    f"Failed to retrieve source code for function '{func.__name__}'. "
-                    "This can happen with functions defined dynamically or interactively "
-                    "(e.g., in a Jupyter notebook or REPL). Ensure the function is defined "
-                    f"in a standard Python module if possible. Original error: {e}"
-                )
-
-            # Serialize arguments using pickle for complex objects
-            serialized_args = base64.b64encode(pickle.dumps(args)).decode("utf-8")
-            serialized_kwargs = base64.b64encode(pickle.dumps(kwargs)).decode("utf-8")
-
-            # Prepare payload
-            payload = {
-                "function_code": func_code,
-                "args": serialized_args,
-                "kwargs": serialized_kwargs,
-            }
-
-            # Get container URL
-            container_url = (
-                cont.status.tailnet_url
-                if cont.status and hasattr(cont.status, "tailnet_url")
-                else "http://localhost:8080"
-            )
-
-            # Send to container and get result
-            response = requests.post(f"{container_url}/execute", json=payload)
-
-            if response.status_code != 200:
-                raise RuntimeError(f"Function execution failed: {response.text}")
-
-            # Deserialize the result
-            serialized_result = response.json()["result"]
-            result = pickle.loads(base64.b64decode(serialized_result))
-
-            return result
-
-        return wrapper
-
-    return decorator
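For context on what this removal takes away: the deleted `container` decorator wrapped a plain function so that calling it would provision a Container, ship the function's source (captured with dill) and pickled arguments to an execution server inside the container, and return the unpickled result. A hypothetical usage sketch, based only on the deleted code above and valid only for 0.1.91 and earlier; the function, image, and accelerator names are illustrative.

```python
# Hypothetical usage of the removed @container decorator; names are illustrative.
from nebu.containers.decorator import container  # removed in 0.1.93


@container(image="python:3.11-slim", accelerators=["A100"])
def add(a: int, b: int) -> int:
    return a + b


# Calling the wrapped function provisions the container, POSTs the serialized
# function and arguments to its /execute endpoint, and unpickles the result.
result = add(2, 3)
print(result)  # 5
```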
nebu/containers/server.py
DELETED
@@ -1,70 +0,0 @@
-import base64
-import json
-import pickle
-from http.server import BaseHTTPRequestHandler, HTTPServer
-
-
-class FunctionExecutionHandler(BaseHTTPRequestHandler):
-    def do_POST(self):
-        content_length = int(self.headers["Content-Length"])
-        post_data = self.rfile.read(content_length)
-        payload = json.loads(post_data.decode("utf-8"))
-
-        if self.path == "/execute":
-            try:
-                # Extract function code, args and kwargs
-                func_code = payload["function_code"]
-                serialized_args = payload["args"]
-                serialized_kwargs = payload["kwargs"]
-
-                # Deserialize arguments
-                args = pickle.loads(base64.b64decode(serialized_args))
-                kwargs = pickle.loads(base64.b64decode(serialized_kwargs))
-
-                # Create a local namespace and execute the function
-                local_namespace = {}
-                exec(func_code, globals(), local_namespace)
-
-                # Find the function object in the local namespace
-                func_name = None
-                for name, obj in local_namespace.items():
-                    if callable(obj) and not name.startswith("__"):
-                        func_name = name
-                        break
-
-                if not func_name:
-                    raise ValueError("No function found in the provided code")
-
-                # Execute the function
-                result = local_namespace[func_name](*args, **kwargs)
-
-                # Serialize the result
-                serialized_result = base64.b64encode(pickle.dumps(result)).decode(
-                    "utf-8"
-                )
-
-                # Send response
-                self.send_response(200)
-                self.send_header("Content-type", "application/json")
-                self.end_headers()
-                self.wfile.write(json.dumps({"result": serialized_result}).encode())
-
-            except Exception as e:
-                self.send_response(500)
-                self.send_header("Content-type", "application/json")
-                self.end_headers()
-                self.wfile.write(json.dumps({"error": str(e)}).encode())
-        else:
-            self.send_response(404)
-            self.end_headers()
-
-
-def run_server(port: int = 8080) -> None:
-    server_address = ("", port)
-    httpd = HTTPServer(server_address, FunctionExecutionHandler)
-    print(f"Starting server on port {port}")
-    httpd.serve_forever()
-
-
-if __name__ == "__main__":
-    run_server()
nebu/processors/remote.py
DELETED
@@ -1,47 +0,0 @@
-from abc import ABC, abstractmethod
-from typing import Any, Dict, Optional, Type, TypeVar
-
-from pydantic import BaseModel
-
-from nebu.config import GlobalConfig
-from nebu.processors.models import V1ContainerRequest, V1Scale
-
-from .models import Message
-from .processor import Processor
-
-I = TypeVar("I", bound=BaseModel)
-O = TypeVar("O", bound=BaseModel)
-
-
-class RemoteProcessor(ABC, Processor):
-    def __init__(
-        self,
-        name: str,
-        namespace: Optional[str] = None,
-        labels: Optional[Dict[str, str]] = None,
-        container: Optional[V1ContainerRequest] = None,
-        schema_: Optional[Any] = None,
-        common_schema: Optional[str] = None,
-        min_replicas: Optional[int] = None,
-        max_replicas: Optional[int] = None,
-        scale_config: Optional[V1Scale] = None,
-        config: Optional[GlobalConfig] = None,
-        no_delete: bool = False,
-    ):
-        super().__init__(
-            name,
-            namespace,
-            labels,
-            container,
-            schema_,
-            common_schema,
-            min_replicas,
-            max_replicas,
-            scale_config,
-            config,
-            no_delete,
-        )
-
-    @abstractmethod
-    def process(self, message: Message[I]) -> Type[BaseModel]:
-        pass
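The removed `RemoteProcessor` was an abstract convenience wrapper around `Processor`: subclasses supplied a concrete `process` implementation. A minimal illustration of how it would have been subclassed, based only on the deleted code above and applicable only to 0.1.91 and earlier; the `EchoProcessor`, `EchoInput`, and `EchoOutput` names are hypothetical and not part of the package.

```python
# Hypothetical subclass of the removed RemoteProcessor; the models below are
# illustrative only.
from typing import Type

from pydantic import BaseModel

from nebu.processors.models import Message
from nebu.processors.remote import RemoteProcessor  # removed in 0.1.93


class EchoInput(BaseModel):
    text: str


class EchoOutput(BaseModel):
    text: str


class EchoProcessor(RemoteProcessor):
    def process(self, message: Message[EchoInput]) -> Type[BaseModel]:
        # The abstract hook subclasses had to implement; this stub just
        # declares the output model it would produce.
        return EchoOutput
```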
nebu-0.1.91.dist-info/RECORD
DELETED
@@ -1,30 +0,0 @@
-nebu/__init__.py,sha256=xNtWiN29MJZK_WBEUP-9hDmlkfLxoASVI-f4tNTXO58,454
-nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
-nebu/cache.py,sha256=jmluqvWnE9N8uNq6nppXSxEJK7DKWaB79GicaGg9KmY,4718
-nebu/config.py,sha256=C5Jt9Bd0i0HrgzBSVNJ-Ml3KwX_gaYbYYZEtNL2gvJg,7031
-nebu/data.py,sha256=X0aAJYuHNVcTCRHpIDDm546HwMqIZpv40lGrozlL41A,39797
-nebu/errors.py,sha256=bBnK5YQ6qZg4OMY81AN2k03ppefg89FUwF_SHEMlqCA,170
-nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
-nebu/orign.py,sha256=SkVfHgpadwik58KCZCrjdV5EHY0dhpEhDvijzLxY11Y,2052
-nebu/builders/builder.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu/builders/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu/containers/container.py,sha256=jyqG-WLFMAFzYLW9Bagi74zJ-zUPq_jHmrxF_2HVWjQ,13652
-nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
-nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
-nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
-nebu/namespaces/models.py,sha256=EqUOpzhVBhvJw2P92ONDUbIgC31M9jMmcaG5vyOrsWg,497
-nebu/namespaces/namespace.py,sha256=LsbGiGBzVtFUtxCRayGrqr2X1tDzRep4RnNklfWCC1k,5160
-nebu/processors/consumer.py,sha256=RNc4DCuAcmFfSDIyprJrACuVfog6V5cWoqWt3zDRGqQ,53141
-nebu/processors/consumer_process_worker.py,sha256=UAmhrR1wilQnRPbbHYZ9jaIrDKs0LKsSHxbj4VFvFcQ,31969
-nebu/processors/decorate.py,sha256=w8ZJwe8MNhBoW_LJzUWHOgXvo-39RnPsGvRUXSd9Hk4,55784
-nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
-nebu/processors/models.py,sha256=FnBJFxtaJkp-uIOs90qkJUBvOR80l2cdGnfmOIWIvVA,4058
-nebu/processors/processor.py,sha256=h1ZmD1rqTSuC9QpHkobx6VCUZoVkKXoDBLaB_xxfjys,16037
-nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
-nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
-nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.91.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.91.dist-info/METADATA,sha256=LVAMxSj83DGy_CAHMuY1RxH3UYAn2wQ5JSoecIxQeY0,1731
-nebu-0.1.91.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
-nebu-0.1.91.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.91.dist-info/RECORD,,
{nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/WHEEL
File without changes
{nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/licenses/LICENSE
File without changes
{nebu-0.1.91.dist-info → nebu-0.1.93.dist-info}/top_level.txt
File without changes