nebu 0.1.39__py3-none-any.whl → 0.1.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/adapter.py CHANGED
@@ -1,4 +1,5 @@
  import time
+ from typing import List

  from pydantic import BaseModel, Field

@@ -14,3 +15,5 @@ class Adapter(BaseModel):
  lora_rank: int = Field(default=8)
  lora_alpha: int = Field(default=16)
  lora_dropout: float = Field(default=0.1)
+ lora_target_modules: List[str] = Field(default=[])
+ learning_rate: float = Field(default=0.0001)
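The two new fields extend the existing LoRA options on the Adapter model. A minimal runnable sketch of how they behave under pydantic defaults; LoraSettings is a hypothetical stand-in for illustration only, since the real Adapter model defines further fields outside this hunk:

    from typing import List
    from pydantic import BaseModel, Field

    # Hypothetical stand-in mirroring only the LoRA-related fields visible above;
    # the real nebu Adapter model has additional fields not shown in this hunk.
    class LoraSettings(BaseModel):
        lora_rank: int = Field(default=8)
        lora_alpha: int = Field(default=16)
        lora_dropout: float = Field(default=0.1)
        lora_target_modules: List[str] = Field(default=[])  # new in 0.1.41
        learning_rate: float = Field(default=0.0001)  # new in 0.1.41

    settings = LoraSettings(lora_target_modules=["q_proj", "v_proj"], learning_rate=2e-4)
    print(settings.model_dump())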
nebu/cache.py CHANGED
@@ -1,8 +1,10 @@
  import os
+ import socket # Add socket import
  import time
  from typing import Any, Optional, cast

  import redis
+ import socks # Add socks import
  from pydantic import BaseModel, Field


@@ -36,6 +38,19 @@ class Cache:
  self.redis_client = None
  connection_info = ""

+ # Configure SOCKS proxy before connecting to Redis
+ try:
+ # Use the proxy settings provided by tailscaled
+ socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
+ socket.socket = socks.socksocket
+ print("Configured SOCKS5 proxy for socket connections via localhost:1055")
+ except Exception as proxy_err:
+ print(f"Failed to configure SOCKS proxy: {proxy_err}")
+ # Depending on requirements, you might want to raise an error here
+ # or proceed without the proxy if it's optional.
+ # For now, we'll print the error and continue, but the Redis connection
+ # will likely fail if the proxy is required.
+
  try:
  if redis_url:
  # Use REDIS_URL if available
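The hunk above routes every socket opened after Cache.__init__ runs, including the one redis-py creates, through a SOCKS5 proxy by replacing socket.socket globally. A self-contained sketch of that pattern, assuming PySocks is installed and a proxy is listening on localhost:1055 (the port tailscaled exposes in this setup); the Redis host below is a made-up example:

    import socket

    import redis
    import socks  # PySocks

    # Monkey-patch the standard socket class so that every new connection,
    # including the one redis-py opens below, is tunneled through SOCKS5.
    socks.set_default_proxy(socks.SOCKS5, "localhost", 1055)
    socket.socket = socks.socksocket

    # Hypothetical Redis host that is only reachable through the proxy.
    client = redis.Redis(host="redis.internal.example", port=6379)
    client.ping()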
nebu/processors/consumer.py CHANGED
@@ -15,6 +15,9 @@ from redis import ConnectionError, ResponseError
  # Define TypeVar for generic models
  T = TypeVar("T")

+ # Environment variable name used as a guard in the decorator
+ _NEBU_INSIDE_CONSUMER_ENV_VAR = "_NEBU_INSIDE_CONSUMER_EXEC"
+
  # Get function and model source code and create them dynamically
  try:
  function_source = os.environ.get("FUNCTION_SOURCE")
@@ -26,8 +29,15 @@ try:
  is_stream_message = os.environ.get("IS_STREAM_MESSAGE") == "True"
  param_type_name = os.environ.get("PARAM_TYPE_NAME")
  return_type_name = os.environ.get("RETURN_TYPE_NAME")
+ param_type_str = os.environ.get("PARAM_TYPE_STR")
+ return_type_str = os.environ.get("RETURN_TYPE_STR")
  content_type_name = os.environ.get("CONTENT_TYPE_NAME")

+ # Get source for the file containing the decorated function
+ decorated_func_file_source = os.environ.get("DECORATED_FUNC_FILE_SOURCE")
+ # Get sources for the directory containing the decorated function
+ # decorated_dir_sources_json = os.environ.get("DECORATED_DIR_SOURCES") # Removed
+
  # Get init_func source if provided
  init_func_source = os.environ.get("INIT_FUNC_SOURCE")
  init_func_name = os.environ.get("INIT_FUNC_NAME")
@@ -103,136 +113,190 @@ try:
  exec("T = TypeVar('T')", local_namespace)
  exec("from nebu.processors.models import *", local_namespace)
  exec("from nebu.processors.processor import *", local_namespace)
+ # Add import for the processor decorator itself
+ exec("from nebu.processors.decorate import processor", local_namespace)
  # Add import for chatx openai types
  exec("from nebu.chatx.openai import *", local_namespace)

- # Execute included object sources FIRST, as they might define types needed by others
- print("[Consumer] Executing included object sources...")
- for i, (obj_source, args_sources) in enumerate(included_object_sources):
- try:
- exec(obj_source, local_namespace)
- print(f"[Consumer] Successfully executed included object {i} base source")
- for j, arg_source in enumerate(args_sources):
- try:
- exec(arg_source, local_namespace)
- print(
- f"[Consumer] Successfully executed included object {i} arg {j} source"
- )
- except Exception as e:
- print(f"Error executing included object {i} arg {j} source: {e}")
- traceback.print_exc()
- except Exception as e:
- print(f"Error executing included object {i} base source: {e}")
- traceback.print_exc()
- print("[Consumer] Finished executing included object sources.")
+ # Set the guard environment variable before executing any source code
+ os.environ[_NEBU_INSIDE_CONSUMER_ENV_VAR] = "1"
+ print(f"[Consumer] Set environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}=1")

- # First try to import the module to get any needed dependencies
- # This is a fallback in case the module is available
- module_name = os.environ.get("MODULE_NAME")
  try:
- if module_name:
- exec(f"import {module_name}", local_namespace)
- print(f"Successfully imported module {module_name}")
- except Exception as e:
- print(f"Warning: Could not import module {module_name}: {e}")
- print(
- "This is expected if running in a Jupyter notebook. Will use dynamic execution."
- )
+ # Execute the source file of the decorated function FIRST
+ if decorated_func_file_source:
+ print("[Consumer] Executing decorated function's file source...")
+ try:
+ exec(decorated_func_file_source, local_namespace)
+ print(
+ "[Consumer] Successfully executed decorated function's file source."
+ )
+ except Exception as e:
+ print(f"Error executing decorated function's file source: {e}")
+ traceback.print_exc() # Warn and continue
+ else:
+ print(
+ "[Consumer] No decorated function's file source found in environment."
+ )

- # Define the models
- # First define stream message class if needed
- if stream_message_source:
- try:
- exec(stream_message_source, local_namespace)
- print("Successfully defined Message class")
- except Exception as e:
- print(f"Error defining Message: {e}")
- traceback.print_exc()
+ # Execute the sources from the decorated function's directory
+ # if decorated_dir_sources_json:
+ # print("[Consumer] Executing decorated function's directory sources...")
+ # try:
+ # dir_sources = json.loads(decorated_dir_sources_json)
+ # # Sort by relative path for some predictability (e.g., __init__.py first)
+ # for rel_path, source_code in sorted(dir_sources.items()):
+ # print(f"[Consumer] Executing source from: {rel_path}...")
+ # try:
+ # exec(source_code, local_namespace)
+ # print(f"[Consumer] Successfully executed source from: {rel_path}")
+ # except Exception as e:
+ # print(f"Error executing source from {rel_path}: {e}")
+ # traceback.print_exc() # Warn and continue
+ # except json.JSONDecodeError as e:
+ # print(f"Error decoding DECORATED_DIR_SOURCES JSON: {e}")
+ # traceback.print_exc()
+ # except Exception as e:
+ # print(f"Unexpected error processing directory sources: {e}")
+ # traceback.print_exc()
+ # else:
+ # print("[Consumer] No decorated function's directory sources found in environment.")
+
+ # Execute included object sources NEXT, as they might define types needed by others
+ print("[Consumer] Executing included object sources...")
+ for i, (obj_source, args_sources) in enumerate(included_object_sources):
+ try:
+ exec(obj_source, local_namespace)
+ print(
+ f"[Consumer] Successfully executed included object {i} base source"
+ )
+ for j, arg_source in enumerate(args_sources):
+ try:
+ exec(arg_source, local_namespace)
+ print(
+ f"[Consumer] Successfully executed included object {i} arg {j} source"
+ )
+ except Exception as e:
+ print(
+ f"Error executing included object {i} arg {j} source: {e}"
+ )
+ traceback.print_exc()
+ except Exception as e:
+ print(f"Error executing included object {i} base source: {e}")
+ traceback.print_exc()
+ print("[Consumer] Finished executing included object sources.")

- # Define content type if available
- if content_type_source:
+ # First try to import the module to get any needed dependencies
+ # This is a fallback in case the module is available
+ module_name = os.environ.get("MODULE_NAME")
  try:
- exec(content_type_source, local_namespace)
- print(f"Successfully defined content type {content_type_name}")
-
- # Define any content type args
- for arg_source in content_type_args:
- try:
- exec(arg_source, local_namespace)
- print("Successfully defined content type argument")
- except Exception as e:
- print(f"Error defining content type argument: {e}")
- traceback.print_exc()
+ if module_name:
+ exec(f"import {module_name}", local_namespace)
+ print(f"Successfully imported module {module_name}")
  except Exception as e:
- print(f"Error defining content type: {e}")
- traceback.print_exc()
-
- # Define input model if different from stream message
- if input_model_source and (
- not is_stream_message or input_model_source != stream_message_source
- ):
- try:
- exec(input_model_source, local_namespace)
- print(f"Successfully defined input model {param_type_name}")
+ print(f"Warning: Could not import module {module_name}: {e}")
+ print(
+ "This is expected if running in a Jupyter notebook. Will use dynamic execution."
+ )

- # Define any input model args
- for arg_source in input_model_args:
- try:
- exec(arg_source, local_namespace)
- print("Successfully defined input model argument")
- except Exception as e:
- print(f"Error defining input model argument: {e}")
- traceback.print_exc()
- except Exception as e:
- print(f"Error defining input model: {e}")
- traceback.print_exc()
+ # Define the models
+ # First define stream message class if needed
+ if stream_message_source:
+ try:
+ exec(stream_message_source, local_namespace)
+ print("Successfully defined Message class")
+ except Exception as e:
+ print(f"Error defining Message: {e}")
+ traceback.print_exc()

- # Define output model
- if output_model_source:
- try:
- exec(output_model_source, local_namespace)
- print(f"Successfully defined output model {return_type_name}")
+ # Define content type if available
+ if content_type_source:
+ try:
+ exec(content_type_source, local_namespace)
+ print(f"Successfully defined content type {content_type_name}")
+
+ # Define any content type args
+ for arg_source in content_type_args:
+ try:
+ exec(arg_source, local_namespace)
+ print("Successfully defined content type argument")
+ except Exception as e:
+ print(f"Error defining content type argument: {e}")
+ traceback.print_exc()
+ except Exception as e:
+ print(f"Error defining content type: {e}")
+ traceback.print_exc()

- # Define any output model args
- for arg_source in output_model_args:
- try:
- exec(arg_source, local_namespace)
- print("Successfully defined output model argument")
- except Exception as e:
- print(f"Error defining output model argument: {e}")
- traceback.print_exc()
- except Exception as e:
- print(f"Error defining output model: {e}")
- traceback.print_exc()
+ # Define input model if different from stream message
+ if input_model_source and (
+ not is_stream_message or input_model_source != stream_message_source
+ ):
+ try:
+ exec(input_model_source, local_namespace)
+ print(f"Successfully defined input model {param_type_str}")
+
+ # Define any input model args
+ for arg_source in input_model_args:
+ try:
+ exec(arg_source, local_namespace)
+ print("Successfully defined input model argument")
+ except Exception as e:
+ print(f"Error defining input model argument: {e}")
+ traceback.print_exc()
+ except Exception as e:
+ print(f"Error defining input model: {e}")
+ traceback.print_exc()

- # Finally, execute the function code
- try:
- exec(function_source, local_namespace)
- target_function = local_namespace[function_name]
- print(f"Successfully loaded function {function_name}")
- except Exception as e:
- print(f"Error creating function from source: {e}")
- traceback.print_exc()
- sys.exit(1)
+ # Define output model
+ if output_model_source:
+ try:
+ exec(output_model_source, local_namespace)
+ print(f"Successfully defined output model {return_type_str}")
+
+ # Define any output model args
+ for arg_source in output_model_args:
+ try:
+ exec(arg_source, local_namespace)
+ print("Successfully defined output model argument")
+ except Exception as e:
+ print(f"Error defining output model argument: {e}")
+ traceback.print_exc()
+ except Exception as e:
+ print(f"Error defining output model: {e}")
+ traceback.print_exc()

- # Execute init_func if provided
- if init_func_source and init_func_name:
- print(f"Executing init_func: {init_func_name}...")
+ # Finally, execute the function code
  try:
- exec(init_func_source, local_namespace)
- init_function = local_namespace[init_func_name]
- print(
- f"[Consumer] Environment before calling init_func {init_func_name}: {os.environ}"
- )
- init_function() # Call the function
- print(f"Successfully executed init_func: {init_func_name}")
+ exec(function_source, local_namespace)
+ target_function = local_namespace[function_name]
+ print(f"Successfully loaded function {function_name}")
  except Exception as e:
- print(f"Error executing init_func '{init_func_name}': {e}")
+ print(f"Error creating function from source: {e}")
  traceback.print_exc()
- # Decide if failure is critical. For now, let's exit.
- print("Exiting due to init_func failure.")
  sys.exit(1)

+ # Execute init_func if provided
+ if init_func_source and init_func_name:
+ print(f"Executing init_func: {init_func_name}...")
+ try:
+ exec(init_func_source, local_namespace)
+ init_function = local_namespace[init_func_name]
+ print(
+ f"[Consumer] Environment before calling init_func {init_func_name}: {os.environ}"
+ )
+ init_function() # Call the function
+ print(f"Successfully executed init_func: {init_func_name}")
+ except Exception as e:
+ print(f"Error executing init_func '{init_func_name}': {e}")
+ traceback.print_exc()
+ # Decide if failure is critical. For now, let's exit.
+ print("Exiting due to init_func failure.")
+ sys.exit(1)
+ finally:
+ # Unset the guard environment variable after all execs are done
+ os.environ.pop(_NEBU_INSIDE_CONSUMER_ENV_VAR, None)
+ print(f"[Consumer] Unset environment variable {_NEBU_INSIDE_CONSUMER_ENV_VAR}")
+
  except Exception as e:
  print(f"Error setting up function: {e}")
  traceback.print_exc()
@@ -412,9 +476,9 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
  else:
  # Otherwise use the param type directly
  try:
- if param_type_name in local_namespace:
- print(f"Validating content against {param_type_name}")
- input_obj = local_namespace[param_type_name].model_validate(content)
+ if param_type_str in local_namespace:
+ print(f"Validating content against {param_type_str}")
+ input_obj = local_namespace[param_type_str].model_validate(content)
  else:
  # If we can't find the exact type, just pass the content directly
  input_obj = content
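The consumer changes above all rely on the same mechanism: source strings delivered through environment variables are exec'd into one shared namespace, and incoming payloads are then validated against a model looked up by name (now PARAM_TYPE_STR rather than PARAM_TYPE_NAME) in that namespace. A minimal sketch of that flow with made-up source and payload:

    from pydantic import BaseModel

    # Shared namespace that exec'd source populates, as consumer.py does.
    local_namespace: dict = {"BaseModel": BaseModel}

    # Stand-in for a model source string that would arrive via an env var.
    model_source = "class GreetRequest(BaseModel):\n    name: str\n"
    exec(model_source, local_namespace)

    param_type_str = "GreetRequest"  # stand-in for PARAM_TYPE_STR
    content = {"name": "world"}  # stand-in for the message payload

    if param_type_str in local_namespace:
        # Same lookup-and-validate step the updated process_message performs.
        input_obj = local_namespace[param_type_str].model_validate(content)
    else:
        input_obj = content

    print(input_obj)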
nebu/processors/decorate.py CHANGED
@@ -1,5 +1,6 @@
  import ast # For parsing notebook code
  import inspect
+ import os # Add os import
  import re # Import re for fallback check
  import textwrap
  from typing import (
@@ -39,6 +40,8 @@ R = TypeVar("R", bound=BaseModel)

  # Attribute name for explicitly stored source
  _NEBU_EXPLICIT_SOURCE_ATTR = "_nebu_explicit_source"
+ # Environment variable to prevent decorator recursion inside consumer
+ _NEBU_INSIDE_CONSUMER_ENV_VAR = "_NEBU_INSIDE_CONSUMER_EXEC"

  # --- Jupyter Helper Functions ---

@@ -356,7 +359,17 @@ def processor(
  ):
  def decorator(
  func: Callable[[Any], Any],
- ) -> Processor:
+ ) -> Processor | Callable[[Any], Any]: # Return type can now be original func
+ # --- Prevent Recursion Guard ---
+ # If this env var is set, we are inside the consumer's exec context.
+ # Return the original function without applying the decorator again.
+ if os.environ.get(_NEBU_INSIDE_CONSUMER_ENV_VAR) == "1":
+ print(
+ f"[DEBUG Decorator] Guard triggered for '{func.__name__}'. Returning original function."
+ )
+ return func
+ # --- End Guard ---
+
  # Moved init print here
  print(
  f"[DEBUG Decorator Init] @processor decorating function '{func.__name__}'"
@@ -833,6 +846,33 @@ def processor(
  print("[DEBUG Decorator] Finished populating environment variables.")
  # --- End Environment Variables ---

+ # --- Get Decorated Function's File Source ---
+ print("[DEBUG Decorator] Getting source file for decorated function...")
+ func_file_source = None
+ try:
+ func_file_path = inspect.getfile(func)
+ print(f"[DEBUG Decorator] Found file path: {func_file_path}")
+ with open(func_file_path, "r") as f:
+ func_file_source = f.read()
+ print(
+ f"[DEBUG Decorator] Successfully read source file: {func_file_path} (len: {len(func_file_source)})"
+ )
+ all_env.append(
+ V1EnvVar(key="DECORATED_FUNC_FILE_SOURCE", value=func_file_source)
+ )
+ print("[DEBUG Decorator] Added DECORATED_FUNC_FILE_SOURCE to env.")
+ except (TypeError, OSError) as e:
+ # TypeError: If func is a built-in or C function
+ # OSError: If the file cannot be opened
+ print(
+ f"Warning: Could not read source file for {processor_name}: {e}. Definitions in that file might be unavailable in the consumer."
+ )
+ except Exception as e:
+ print(
+ f"Warning: An unexpected error occurred while reading source file for {processor_name}: {e}"
+ )
+ # --- End Decorated Function's File Source ---
+
  # --- Final Setup ---
  print("[DEBUG Decorator] Preparing final Processor object...")
  metadata = V1ResourceMetaRequest(
nebu/processors/processor.py CHANGED
@@ -1,6 +1,7 @@
  from typing import Any, Dict, List, Optional

  import requests
+ from pydantic import BaseModel

  from nebu.auth import get_user_profile
  from nebu.config import GlobalConfig
@@ -141,7 +142,13 @@ class Processor:
  patch_response.raise_for_status()
  print(f"Updated Processor {self.processor.metadata.name}")

- def send(self, data: Dict[str, Any], wait: bool = False) -> Dict[str, Any]:
+ def __call__(self, data: BaseModel, wait: bool = False) -> Dict[str, Any]:
+ """
+ Allows the Processor instance to be called like a function, sending data.
+ """
+ return self.send(data=data, wait=wait)
+
+ def send(self, data: BaseModel, wait: bool = False) -> Dict[str, Any]:
  """
  Send data to the processor.
  """
{nebu-0.1.39.dist-info → nebu-0.1.41.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nebu
- Version: 0.1.39
+ Version: 0.1.41
  Summary: A globally distributed container runtime
  Requires-Python: >=3.10.14
  Description-Content-Type: text/markdown
@@ -10,7 +10,6 @@ Requires-Dist: dill>=0.3.8
  Requires-Dist: openai>=1.68.2
  Requires-Dist: pillow>=11.1.0
  Requires-Dist: pydantic>=2.10.6
- Requires-Dist: pydantic-ai-slim[openai]>=0.0.55
  Requires-Dist: pysocks>=1.7.1
  Requires-Dist: pyyaml>=6.0.2
  Requires-Dist: redis[socks]>=5.0
{nebu-0.1.39.dist-info → nebu-0.1.41.dist-info}/RECORD RENAMED
@@ -1,7 +1,7 @@
  nebu/__init__.py,sha256=5sepbzdAdoA_8TIxws60S4ugFY1apQd_savzn20a4cY,465
- nebu/adapter.py,sha256=X9Lc0-pm4U6v7UDgOd_RuxJqENqAO8Si4jrwGYTsMGI,418
+ nebu/adapter.py,sha256=yELhnYDsb56vI4xb_fpgZehsr9GQwiNY1tcasO2IQQo,546
  nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
- nebu/cache.py,sha256=nKssfJ4mqkxkzoU776cr7RjOSbKoueqNvmHvQ0Ufj70,3929
+ nebu/cache.py,sha256=jmluqvWnE9N8uNq6nppXSxEJK7DKWaB79GicaGg9KmY,4718
  nebu/config.py,sha256=aZzQltkobtOLHFCGcIkpKoE3ITn3Z11Dp0E72w84TA0,5769
  nebu/data.py,sha256=kIH9-JJ1-iO7P2t28bku6Gn0Y5tgQszGeTW_rpmO03A,38725
  nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
@@ -11,16 +11,16 @@ nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,
  nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
  nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
  nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
- nebu/processors/consumer.py,sha256=vVbA6PdgkTKI5iqCDSnHO0dtbyFPiK7_1ys3ph7xPrk,20786
- nebu/processors/decorate.py,sha256=KZ43vOPzl3HVPB9P3QX6U94yTBnVi7ISoXQhUuEMO7g,38156
+ nebu/processors/consumer.py,sha256=qQleTn5BmaZvlH4YkrZqqEZNvMJDv5RpKRVRHX28Qs0,24496
+ nebu/processors/decorate.py,sha256=ltYo2DMBsMJek2AZe4o0w9eFVo3qFCfGg7oRYlskRvU,40144
  nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
  nebu/processors/models.py,sha256=y40HoW-MEzDWB2dm_tsYlUy3Nf3s6eiLC0iGO9BoNog,3956
- nebu/processors/processor.py,sha256=068hLQKapWabNlhb_DtzqAJ7N7MGdr5UcjfZrb_MkFo,9732
+ nebu/processors/processor.py,sha256=lN1T665WV-d45gerGBgb8HxVVc2k52U3hxIBaAfe-Nk,9991
  nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
  nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
  nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nebu-0.1.39.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- nebu-0.1.39.dist-info/METADATA,sha256=aAJh9bd6ya9FoUBkqpxnAsk6T6Vwtwtz6g5hhMl28PY,1786
- nebu-0.1.39.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- nebu-0.1.39.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
- nebu-0.1.39.dist-info/RECORD,,
+ nebu-0.1.41.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ nebu-0.1.41.dist-info/METADATA,sha256=OohCbzIvc4-hH0OzVDsWQJpOfAsi7kH9WqHOIqGo_f4,1738
+ nebu-0.1.41.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ nebu-0.1.41.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+ nebu-0.1.41.dist-info/RECORD,,