nebu 0.1.29__py3-none-any.whl → 0.1.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/cache.py CHANGED
@@ -25,29 +25,42 @@ class Cache:
         Initializes the Redis connection.
         Pulls connection details from environment variables REDIS_HOST,
         REDIS_PORT, and REDIS_DB if available, otherwise uses defaults.
+        Also checks for REDIS_URL and prefers that if set.
         """
-        redis_host = os.environ.get("REDIS_HOST", host)
-        redis_port = int(os.environ.get("REDIS_PORT", port))
-        redis_db = int(os.environ.get("REDIS_DB", db))
+        redis_url = os.environ.get("REDIS_URL")
         namespace = os.environ.get("NEBU_NAMESPACE")
         if not namespace:
             raise ValueError("NEBU_NAMESPACE environment variable is not set")
 
+        self.redis_client = None
+        connection_info = ""
+
         try:
-            # decode_responses=True ensures keys and values are returned as strings
-            self.redis_client = redis.StrictRedis(
-                host=redis_host, port=redis_port, db=redis_db, decode_responses=True
-            )
+            if redis_url:
+                # Use REDIS_URL if available
+                self.redis_client = redis.StrictRedis.from_url(
+                    redis_url, decode_responses=True
+                )
+                connection_info = f"URL {redis_url}"
+            else:
+                # Fallback to individual host, port, db
+                redis_host = os.environ.get("REDIS_HOST", host)
+                redis_port = int(os.environ.get("REDIS_PORT", port))
+                redis_db = int(os.environ.get("REDIS_DB", db))
+                self.redis_client = redis.StrictRedis(
+                    host=redis_host, port=redis_port, db=redis_db, decode_responses=True
+                )
+                connection_info = f"{redis_host}:{redis_port}/{redis_db}"
+
             # Ping the server to ensure connection is established
             self.redis_client.ping()
-            print(
-                f"Successfully connected to Redis at {redis_host}:{redis_port}/{redis_db}"
-            )
+            print(f"Successfully connected to Redis using {connection_info}")
 
             self.prefix = f"cache:{namespace}"
         except Exception as e:
             print(f"Error connecting to Redis: {e}")
-            self.redis_client = None  # Set client to None if connection fails
+            # Ensure client is None if connection fails at any point
+            self.redis_client = None
 
     def get(self, key: str) -> str | None:
         """
nebu/chatx/convert.py CHANGED
@@ -9,7 +9,7 @@ from PIL import Image, UnidentifiedImageError
 
 
 def convert_to_unsloth_inference(
-    old_schema: List[Dict[str, Any]],
+    old_schema: Dict[str, Any],
 ) -> Tuple[List[Dict[str, Any]], List[Image.Image]]:
     """
     Convert from an old OpenAI message format that may look like:
@@ -84,26 +84,31 @@ def convert_to_unsloth_inference(
 
 
 def oai_to_unsloth(
-    messages_input: List[Dict[str, Any]],
+    messages_input: Dict[
+        str, Any
+    ],  # Assume input is always dict like {'messages': [...]}
 ) -> Dict[str, List[Dict[str, Any]]]:
     """
-    Converts a list of messages from an OpenAI-like chat format to the Nebulous conversation format.
+    Converts messages from a JSON object containing a 'messages' key
+    (typical in JSON Lines format) to the Nebulous conversation format.
     Images specified by URLs or base64 strings are loaded into PIL.Image objects.
 
-    Input format example:
-    [
-        {
-            "role": "user",
-            "content": [
-                {"type": "input_text", "text": "Describe the image."},
-                {"type": "input_image", "image_url": "http://... or base64 string"},
-            ]
-        },
-        {
-            "role": "assistant",
-            "content": [{"type": "text", "text": "This is an image of..."}]  # Or potentially just a string
-        }
-    ]
+    Input format example (as dict from JSON line):
+    {
+        "messages": [
+            {
+                "role": "user",
+                "content": [
+                    {"type": "input_text", "text": "Describe the image."},
+                    {"type": "input_image", "image_url": "http://... or base64 string"},
+                ]
+            },
+            {
+                "role": "assistant",
+                "content": [{"type": "text", "text": "This is an image of..."}]  # Or potentially just a string
+            }
+        ]
+    }
 
     Output format example:
     {
@@ -122,8 +127,23 @@ def oai_to_unsloth(
         ]
     }
     """
+    # Directly extract the list of messages, assuming input structure
+    messages_to_process = messages_input.get("messages", [])
+
+    # Validate that 'messages' key contained a list
+    if not isinstance(messages_to_process, list):
+        print(
+            f"Warning: Input dict provided, but 'messages' key does not contain a list: {type(messages_to_process)}. Returning empty."
+        )
+        return {"messages": []}
+
     nebu_conversation = []
-    for message in messages_input:
+    for message in messages_to_process:  # Use the extracted list
+        # Add check here for robustness against malformed items *within* the list
+        if not isinstance(message, dict):
+            print(f"Warning: Skipping non-dictionary item in message list: {message!r}")
+            continue
+
         role = message.get("role")
         input_content = message.get("content")  # Can be list or string
 
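oai_to_unsloth now expects the parsed JSON object rather than a bare message list. A small usage sketch under that assumption (text-only content so it runs without fetching an image; image items would be loaded into PIL.Image objects as the docstring describes):

    from nebu.chatx.convert import oai_to_unsloth

    # One parsed JSON line, shaped as the new docstring describes
    record = {
        "messages": [
            {
                "role": "user",
                "content": [{"type": "input_text", "text": "Describe the image."}],
            },
            {"role": "assistant", "content": [{"type": "text", "text": "It is a cat."}]},
        ]
    }

    result = oai_to_unsloth(record)
    # result["messages"] holds the Nebulous-format conversation; passing a bare
    # list (the 0.1.29 calling convention) no longer matches the signature.
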
nebu/processors/consumer.py CHANGED
@@ -342,7 +342,9 @@ def process_message(message_id: str, message_data: Dict[str, str]) -> None:
         if content_type_name and content_type_name in local_namespace:
             # Try to create the content type model first
             try:
-                content_model = local_namespace[content_type_name](**content)
+                content_model = local_namespace[content_type_name].model_validate(
+                    content
+                )
                 print(f"Content model: {content_model}")
                 input_obj = local_namespace["Message"](
                     kind=kind,
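
For context, the move from keyword-unpacking to Pydantic v2's model_validate changes which kinds of content values are accepted; a standalone illustration with a hypothetical Payload model (not part of nebu):

    from pydantic import BaseModel

    class Payload(BaseModel):
        name: str
        count: int = 0

    data = {"name": "job-1", "count": 3}

    a = Payload(**data)               # works only when data is a plain mapping
    b = Payload.model_validate(data)  # works for mappings...
    c = Payload.model_validate(b)     # ...and for existing model instances
    # Payload(**b) would raise TypeError: argument after ** must be a mapping
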
nebu/processors/processor.py CHANGED
@@ -12,6 +12,7 @@ from nebu.processors.models import (
     V1Processors,
     V1ProcessorScaleRequest,
     V1Scale,
+    V1StreamData,
     V1UpdateProcessor,
 )
 
@@ -140,7 +141,7 @@ class Processor:
         patch_response.raise_for_status()
         print(f"Updated Processor {self.processor.metadata.name}")
 
-    def send(self, data: Dict[str, Any]) -> Dict[str, Any]:
+    def send(self, data: Dict[str, Any], wait: bool = False) -> Dict[str, Any]:
         """
         Send data to the processor.
         """
@@ -149,9 +150,14 @@ class Processor:
 
         url = f"{self.processors_url}/{self.processor.metadata.namespace}/{self.processor.metadata.name}/messages"
 
+        stream_data = V1StreamData(
+            content=data,
+            wait=wait,
+        )
+
         response = requests.post(
             url,
-            json=data,
+            json=stream_data.model_dump(exclude_none=True),
             headers={"Authorization": f"Bearer {self.api_key}"},
         )
         response.raise_for_status()
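
The request body therefore changes shape: send() now posts a V1StreamData envelope instead of the raw dict. A rough sketch of the serialized payload, assuming V1StreamData sets no other non-None fields by default:

    from nebu.processors.models import V1StreamData

    # What processor.send({"prompt": "hello"}, wait=True) now serializes:
    stream_data = V1StreamData(content={"prompt": "hello"}, wait=True)
    print(stream_data.model_dump(exclude_none=True))
    # expected roughly: {'content': {'prompt': 'hello'}, 'wait': True}
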
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nebu
-Version: 0.1.29
+Version: 0.1.30
 Summary: A globally distributed container runtime
 Requires-Python: >=3.10.14
 Description-Content-Type: text/markdown
@@ -1,26 +1,26 @@
 nebu/__init__.py,sha256=5sepbzdAdoA_8TIxws60S4ugFY1apQd_savzn20a4cY,465
 nebu/adapter.py,sha256=yWQCpAn2lJxXPSyQacmFpfVzAL3RrKtKgrSMws0BoT8,211
 nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
-nebu/cache.py,sha256=ngfotHgmT7cvu16kI1Y43pPBBdQt0kUMOk2s3ssNHC4,3292
+nebu/cache.py,sha256=1aY1plIXWOPmUY6GGq_s_QDXzIi5UMuG34XYBA8PpW8,3803
 nebu/config.py,sha256=aZzQltkobtOLHFCGcIkpKoE3ITn3Z11Dp0E72w84TA0,5769
 nebu/data.py,sha256=kIH9-JJ1-iO7P2t28bku6Gn0Y5tgQszGeTW_rpmO03A,38725
 nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
-nebu/chatx/convert.py,sha256=t-8ATcIKEv0oYCMFcYzZIdOZcjDHcvdBazQZOLH7A0k,7906
+nebu/chatx/convert.py,sha256=4rjccr9bI0xmCAumKlxyFpVzW0up9Yj-gJYvDjH6C54,8798
 nebu/chatx/openai.py,sha256=LLSPvVGnauUViAXY7OifwoWlkUGZWfgxEjxR4mjSqZg,44961
 nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,13502
 nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,3244
 nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
 nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
-nebu/processors/consumer.py,sha256=Nd5s3C5aYCJvnj5VqhwYekIX9Gkafy1eJtWFTbVQJrM,19635
+nebu/processors/consumer.py,sha256=becQxjQoerlaDdr2_wxzIux3wl8SVmo2LA93RZn0Jk8,19694
 nebu/processors/decorate.py,sha256=9mf25RawOX6_6WyQcRLBIHQC3oCDtofQZijJ13aQM9E,34576
 nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
 nebu/processors/models.py,sha256=y40HoW-MEzDWB2dm_tsYlUy3Nf3s6eiLC0iGO9BoNog,3956
-nebu/processors/processor.py,sha256=STyzq2CiKD2KCnijVuNebguakNtFQHFf8ef-UTJeyBk,9548
+nebu/processors/processor.py,sha256=EQ3-dBf432fSAQE2A_9ATX-cG5LkJ4fjVaOtlxCoXvc,9719
 nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.29.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-nebu-0.1.29.dist-info/METADATA,sha256=3Wi5lrW1j5AsloJEypjpCl0RlEKOjvFQugRovKNHvyA,1786
-nebu-0.1.29.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-nebu-0.1.29.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
-nebu-0.1.29.dist-info/RECORD,,
+nebu-0.1.30.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.30.dist-info/METADATA,sha256=Y_o6hvOdihVgFJMRQPIpALA2V6JfaXybtrlSaQyMNtI,1786
+nebu-0.1.30.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nebu-0.1.30.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.30.dist-info/RECORD,,