nebu-0.1.41-py3-none-any.whl → nebu-0.1.44-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/adapter.py CHANGED
@@ -17,3 +17,4 @@ class Adapter(BaseModel):
  lora_dropout: float = Field(default=0.1)
  lora_target_modules: List[str] = Field(default=[])
  learning_rate: float = Field(default=0.0001)
+ examples_trained: int = Field(default=0)
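The new examples_trained counter is an ordinary Pydantic field with a default of 0, so adapter payloads produced by older versions still validate. A minimal sketch of how such a field behaves, using a stand-in model that mirrors only the fields visible in this hunk (the real Adapter defines more):

    from typing import List

    from pydantic import BaseModel, Field


    class AdapterSketch(BaseModel):
        # Stand-in for illustration; not the full nebu Adapter definition.
        lora_dropout: float = Field(default=0.1)
        lora_target_modules: List[str] = Field(default=[])
        learning_rate: float = Field(default=0.0001)
        examples_trained: int = Field(default=0)


    adapter = AdapterSketch()          # examples_trained defaults to 0
    adapter.examples_trained += 512    # e.g. bump the counter after a training pass
    print(adapter.model_dump())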
nebu/processors/decorate.py CHANGED
@@ -878,15 +878,32 @@ def processor(
  metadata = V1ResourceMetaRequest(
  name=processor_name, namespace=namespace, labels=labels
  )
- consumer_command = f"{python_cmd} -u -m nebu.processors.consumer"
- setup_commands = [
- f"{python_cmd} -m pip install dill pydantic redis nebu", # Base deps
+ # Separate the final execution command from setup
+ consumer_module = "nebu.processors.consumer"
+ if "accelerate launch" in python_cmd:
+ # python_cmd is the launcher prefix (e.g., "accelerate launch")
+ # Append the module flag and the module name.
+ # Remove -u as accelerate likely handles buffering.
+ consumer_execution_command = f"{python_cmd.strip()} -m {consumer_module}"
+ else:
+ # Assume python_cmd is just the interpreter (e.g., "python")
+ consumer_execution_command = f"{python_cmd} -u -m {consumer_module}"
+
+ # Build setup commands list - run these with standard python/shell
+ setup_commands_list = [
+ "python -m pip install dill pydantic redis nebu", # Base deps (use standard python)
  ]
  if setup_script:
- print("[DEBUG Decorator] Adding setup script to command.")
- setup_commands.append(f"\n{setup_script}\n")
- setup_commands.append(consumer_command)
- final_command = "\n".join(setup_commands)
+ print("[DEBUG Decorator] Adding setup script to setup commands.")
+ # Add setup script as raw commands
+ setup_commands_list.append(setup_script.strip())
+
+ # Combine setup commands and the final execution command
+ all_commands = setup_commands_list + [consumer_execution_command]
+ final_command = "\n\n".join(
+ all_commands
+ ) # Use double newline for clarity in logs
+
  print(
  f"[DEBUG Decorator] Final container command:\n-------\n{final_command}\n-------"
  )
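The net effect of this change is that the pip bootstrap always runs under plain python, while only the final consumer invocation uses python_cmd, which may now be a launcher prefix such as "accelerate launch". A self-contained sketch of the assembly logic as it reads after the patch (the example python_cmd values are illustrative only):

    def build_final_command(python_cmd: str, setup_script: str | None = None) -> str:
        # Simplified mirror of the post-patch logic in nebu/processors/decorate.py.
        consumer_module = "nebu.processors.consumer"
        if "accelerate launch" in python_cmd:
            # Launcher prefix: no -u flag, accelerate handles output buffering.
            consumer_execution_command = f"{python_cmd.strip()} -m {consumer_module}"
        else:
            # Plain interpreter: keep unbuffered output with -u.
            consumer_execution_command = f"{python_cmd} -u -m {consumer_module}"

        setup_commands_list = ["python -m pip install dill pydantic redis nebu"]
        if setup_script:
            setup_commands_list.append(setup_script.strip())

        return "\n\n".join(setup_commands_list + [consumer_execution_command])


    print(build_final_command("python"))
    # python -m pip install dill pydantic redis nebu
    #
    # python -u -m nebu.processors.consumer

    print(build_final_command("accelerate launch --num_processes 2"))
    # python -m pip install dill pydantic redis nebu
    #
    # accelerate launch --num_processes 2 -m nebu.processors.consumer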
nebu/processors/processor.py CHANGED
@@ -1,3 +1,5 @@
+ import json
+ import threading
  from typing import Any, Dict, List, Optional

  import requests
@@ -18,6 +20,64 @@ from nebu.processors.models import (
  )


+ def _fetch_and_print_logs(log_url: str, api_key: str, processor_name: str):
+ """Helper function to fetch logs in a separate thread."""
+ try:
+ headers = {"Authorization": f"Bearer {api_key}"}
+ print(f"--- Attempting to stream logs for {processor_name} from {log_url} ---")
+ # Use stream=True for potentially long-lived connections and timeout
+ with requests.get(log_url, headers=headers, stream=True, timeout=300) as r:
+ r.raise_for_status()
+ print(f"--- Streaming logs for {processor_name} ---")
+ for line in r.iter_lines():
+ if not line:
+ continue
+ try:
+ # Decode bytes to string
+ decoded_line = line.decode("utf-8")
+ # Parse the JSON line
+ log_data = json.loads(decoded_line)
+
+ # Check if the parsed data is a dictionary (expected format)
+ if isinstance(log_data, dict):
+ for container, log_content in log_data.items():
+ # Ensure log_content is a string before printing
+ if isinstance(log_content, str):
+ print(f"[{processor_name}][{container}] {log_content}")
+ else:
+ # Handle cases where log_content might not be a string
+ print(
+ f"[{processor_name}][{container}] Unexpected log format: {log_content}"
+ )
+ else:
+ # If not a dict, print the raw line with a warning
+ print(
+ f"[{processor_name}] Unexpected log structure (not a dict): {decoded_line}"
+ )
+
+ except json.JSONDecodeError:
+ # If JSON parsing fails, print the original line as fallback
+ print(f"[{processor_name}] {line.decode('utf-8')} (raw/non-JSON)")
+ except Exception as e:
+ # Catch other potential errors during line processing
+ print(f"Error processing log line for {processor_name}: {e}")
+
+ print(f"--- Log stream ended for {processor_name} ---")
+ except requests.exceptions.Timeout:
+ print(f"Log stream connection timed out for {processor_name}.")
+ except requests.exceptions.RequestException as e:
+ # Handle potential API errors gracefully
+ print(f"Error fetching logs for {processor_name} from {log_url}: {e}")
+ if e.response is not None:
+ print(
+ f"Response status: {e.response.status_code}, Response body: {e.response.text}"
+ )
+ except Exception as e:
+ print(
+ f"An unexpected error occurred while fetching logs for {processor_name}: {e}"
+ )
+
+
  class Processor:
  """
  A class for managing Processor instances.
@@ -56,6 +116,7 @@ class Processor:
  self.max_replicas = max_replicas
  self.scale_config = scale_config
  self.processors_url = f"{self.orign_host}/v1/processors"
+ self._log_thread: Optional[threading.Thread] = None

  # Fetch existing Processors
  response = requests.get(
@@ -148,27 +209,61 @@ class Processor:
  """
  return self.send(data=data, wait=wait)

- def send(self, data: BaseModel, wait: bool = False) -> Dict[str, Any]:
+ def send(
+ self, data: BaseModel, wait: bool = False, logs: bool = False
+ ) -> Dict[str, Any]:
  """
- Send data to the processor.
+ Send data to the processor and optionally stream logs in the background.
  """
- if not self.processor or not self.processor.metadata.name:
- raise ValueError("Processor not found")
-
- url = f"{self.processors_url}/{self.processor.metadata.namespace}/{self.processor.metadata.name}/messages"
-
+ if (
+ not self.processor
+ or not self.processor.metadata.name
+ or not self.processor.metadata.namespace
+ ):
+ raise ValueError("Processor not found or missing metadata (name/namespace)")
+
+ processor_name = self.processor.metadata.name
+ processor_namespace = self.processor.metadata.namespace
+
+ # --- Send Data ---
+ messages_url = (
+ f"{self.processors_url}/{processor_namespace}/{processor_name}/messages"
+ )
  stream_data = V1StreamData(
  content=data,
  wait=wait,
  )
-
  response = requests.post(
- url,
+ messages_url,
  json=stream_data.model_dump(mode="json", exclude_none=True),
  headers={"Authorization": f"Bearer {self.api_key}"},
  )
  response.raise_for_status()
- return response.json()
+ send_response_json = response.json()
+
+ # --- Fetch Logs (if requested and not already running) ---
+ if logs:
+ if self._log_thread is None or not self._log_thread.is_alive():
+ log_url = (
+ f"{self.processors_url}/{processor_namespace}/{processor_name}/logs"
+ )
+ self._log_thread = threading.Thread(
+ target=_fetch_and_print_logs,
+ args=(log_url, self.api_key, processor_name), # Pass processor_name
+ daemon=True,
+ )
+ try:
+ self._log_thread.start()
+ print(f"Started background log fetching for {processor_name}...")
+ except Exception as e:
+ print(
+ f"Failed to start log fetching thread for {processor_name}: {e}"
+ )
+ self._log_thread = None # Reset if start fails
+ else:
+ print(f"Log fetching is already running for {processor_name}.")
+
+ return send_response_json

  def scale(self, replicas: int) -> Dict[str, Any]:
  """
@@ -284,3 +379,21 @@ class Processor:
  Get the resource ref for the processor.
  """
  return f"{self.name}.{self.namespace}.Processor"
+
+ def stop_logs(self):
+ """
+ Signals the intent to stop the background log stream.
+ Note: Interrupting a streaming requests.get cleanly can be complex.
+ This currently allows a new log stream to be started on the next call.
+ """
+ if self._log_thread and self._log_thread.is_alive():
+ # Attempting to stop a daemon thread directly isn't standard practice.
+ # Setting the reference to None allows a new thread to be created if needed.
+ # The OS will eventually clean up the daemon thread when the main process exits,
+ # or potentially sooner if the network request completes or errors out.
+ print(
+ f"Disassociating from active log stream for {self.name}. A new stream can be started."
+ )
+ self._log_thread = None
+ else:
+ print(f"No active log stream to stop for {self.name}.")
nebu-0.1.41.dist-info/METADATA → nebu-0.1.44.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nebu
- Version: 0.1.41
+ Version: 0.1.44
  Summary: A globally distributed container runtime
  Requires-Python: >=3.10.14
  Description-Content-Type: text/markdown
@@ -12,7 +12,7 @@ Requires-Dist: pillow>=11.1.0
  Requires-Dist: pydantic>=2.10.6
  Requires-Dist: pysocks>=1.7.1
  Requires-Dist: pyyaml>=6.0.2
- Requires-Dist: redis[socks]>=5.0
+ Requires-Dist: redis>=5.0
  Requires-Dist: requests>=2.32.3
  Dynamic: license-file

nebu-0.1.41.dist-info/RECORD → nebu-0.1.44.dist-info/RECORD CHANGED
@@ -1,10 +1,11 @@
  nebu/__init__.py,sha256=5sepbzdAdoA_8TIxws60S4ugFY1apQd_savzn20a4cY,465
- nebu/adapter.py,sha256=yELhnYDsb56vI4xb_fpgZehsr9GQwiNY1tcasO2IQQo,546
+ nebu/adapter.py,sha256=lqpvfY_up-qVvWxxfYKLFXkYvlEilv-BrRNt554cixU,591
  nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
  nebu/cache.py,sha256=jmluqvWnE9N8uNq6nppXSxEJK7DKWaB79GicaGg9KmY,4718
  nebu/config.py,sha256=aZzQltkobtOLHFCGcIkpKoE3ITn3Z11Dp0E72w84TA0,5769
  nebu/data.py,sha256=kIH9-JJ1-iO7P2t28bku6Gn0Y5tgQszGeTW_rpmO03A,38725
  nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
+ nebu/builders/models.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nebu/chatx/convert.py,sha256=1x6Dz_-posZoxo-xC4QDqeKjrd5RgOkobBZT9K3Ze74,14478
  nebu/chatx/openai.py,sha256=VsJvV2MbYeJj2Ita9Q9X3qj5r5F3P-aPDhpSFr-Q-dw,44950
  nebu/containers/container.py,sha256=yb7KaPTVXnEEAlrpdlUi4HNqF6P7z9bmwAILGlq6iqU,13502
@@ -12,15 +13,15 @@ nebu/containers/decorator.py,sha256=uFtzlAXRHYZECJ-NPusY7oN9GXvdHrHDd_JNrIGr8aQ,
  nebu/containers/models.py,sha256=0j6NGy4yto-enRDh_4JH_ZTbHrLdSpuMOqNQPnIrwC4,6815
  nebu/containers/server.py,sha256=yFa2Y9PzBn59E1HftKiv0iapPonli2rbGAiU6r-wwe0,2513
  nebu/processors/consumer.py,sha256=qQleTn5BmaZvlH4YkrZqqEZNvMJDv5RpKRVRHX28Qs0,24496
- nebu/processors/decorate.py,sha256=ltYo2DMBsMJek2AZe4o0w9eFVo3qFCfGg7oRYlskRvU,40144
+ nebu/processors/decorate.py,sha256=T-nnsu85eH5ui-66E0IJfMj5KG1fMDHeSmj4oT2scZA,40990
  nebu/processors/default.py,sha256=W4slJenG59rvyTlJ7gRp58eFfXcNOTT2Hfi6zzJAobI,365
  nebu/processors/models.py,sha256=y40HoW-MEzDWB2dm_tsYlUy3Nf3s6eiLC0iGO9BoNog,3956
- nebu/processors/processor.py,sha256=lN1T665WV-d45gerGBgb8HxVVc2k52U3hxIBaAfe-Nk,9991
+ nebu/processors/processor.py,sha256=cptZEN9ZGcaoFNreaw3BkwV0qKHvjP9b4nNxlQjFT3s,15405
  nebu/processors/remote.py,sha256=TeAIPGEMqnDIb7H1iett26IEZrBlcbPB_-DSm6jcH1E,1285
  nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
  nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nebu-0.1.41.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- nebu-0.1.41.dist-info/METADATA,sha256=OohCbzIvc4-hH0OzVDsWQJpOfAsi7kH9WqHOIqGo_f4,1738
- nebu-0.1.41.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- nebu-0.1.41.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
- nebu-0.1.41.dist-info/RECORD,,
+ nebu-0.1.44.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ nebu-0.1.44.dist-info/METADATA,sha256=W3bJtTkgUTFqJlBO6lIjjWMlx5HofsnPICd30Xbnvh4,1731
+ nebu-0.1.44.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ nebu-0.1.44.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+ nebu-0.1.44.dist-info/RECORD,,