nebu 0.1.107__py3-none-any.whl → 0.1.108__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- nebu/data.py +3 -3
- nebu/processors/processor.py +62 -4
- {nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/METADATA +1 -1
- {nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/RECORD +7 -7
- {nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/WHEEL +0 -0
- {nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/licenses/LICENSE +0 -0
- {nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/top_level.txt +0 -0
nebu/data.py
CHANGED
@@ -59,7 +59,7 @@ def rclone_copy(
         source_dir,
         destination,
         f"--transfers={transfers}",
-        "--progress",
+        # "--progress",
     ]
 
     if dry_run:
@@ -1171,7 +1171,7 @@ class RcloneBucket(StorageBucket):
             "--modify-window=2s",
             "--log-level=DEBUG" if self.verbose else "--log-level=INFO",
             "--log-format=date,time,level,message",
-            "--progress",  # Add progress display
+            # "--progress",  # Add progress display
         ]
         if dry_run:
             rc_args.append("--dry-run")
@@ -1286,7 +1286,7 @@ class RcloneBucket(StorageBucket):
         rc_args = [
             "--log-level=DEBUG" if self.verbose else "--log-level=INFO",
             "--log-format=date,time,level,message",
-            "--progress",  # Add progress display
+            # "--progress",  # Add progress display
         ]
 
         # Set environment variables for AWS credentials if they exist
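All three nebu/data.py hunks make the same change: rclone's --progress flag is commented out, so transfers no longer emit the interactive progress display, which tends to clutter captured, non-TTY log output. Below is a minimal sketch of the resulting kind of invocation, using only flags that appear in the diff; the function name and defaults are illustrative, not nebu's actual signature.

```python
# Illustrative sketch (not the nebu API): assembling an rclone copy command
# without the interactive --progress flag.
import subprocess


def rclone_copy_sketch(source_dir: str, destination: str,
                       transfers: int = 4, dry_run: bool = False,
                       verbose: bool = False) -> int:
    cmd = [
        "rclone",
        "copy",
        source_dir,
        destination,
        f"--transfers={transfers}",
        "--log-level=DEBUG" if verbose else "--log-level=INFO",
        "--log-format=date,time,level,message",
        # "--progress",  # omitted: progress bars are noisy in captured logs
    ]
    if dry_run:
        cmd.append("--dry-run")
    # Exit code 0 indicates the copy completed successfully.
    return subprocess.run(cmd, check=False).returncode
```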
nebu/processors/processor.py
CHANGED
@@ -2,7 +2,7 @@ import json
 import threading
 import time
 import uuid
-from typing import Any, Dict, Generic, List, Optional, TypeVar
+from typing import Any, Dict, Generic, List, Optional, TypeVar, cast, get_args
 
 import requests
 from pydantic import BaseModel
@@ -132,6 +132,30 @@ class Processor(Generic[InputType, OutputType]):
         self.processors_url = f"{self.orign_host}/v1/processors"
         self._log_thread: Optional[threading.Thread] = None
 
+        # Attempt to infer OutputType if schema_ is not provided
+        if self.schema_ is None and hasattr(self, "__orig_class__"):
+            type_args = get_args(self.__orig_class__)  # type: ignore
+            if len(type_args) == 2:
+                output_type_candidate = type_args[1]
+                # Check if it looks like a Pydantic model class
+                if isinstance(output_type_candidate, type) and issubclass(
+                    output_type_candidate, BaseModel
+                ):
+                    logger.debug(
+                        f"Inferred OutputType {output_type_candidate.__name__} from generic arguments."
+                    )
+                    self.schema_ = output_type_candidate
+                else:
+                    logger.debug(
+                        f"Second generic argument {output_type_candidate} is not a Pydantic BaseModel. "
+                        "Cannot infer OutputType."
+                    )
+            else:
+                logger.debug(
+                    "Could not infer OutputType from generic arguments: wrong number of type args found "
+                    f"(expected 2, got {len(type_args) if type_args else 0})."
+                )
+
         # Fetch existing Processors
         response = requests.get(
             self.processors_url, headers={"Authorization": f"Bearer {self.api_key}"}
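The new block infers the output schema from the class's generic parameters when schema_ is not given explicitly. The mechanism: when an instance is created through a subscripted alias, CPython attaches that alias to the instance as __orig_class__ (after the object has been constructed), and typing.get_args() unpacks its concrete type arguments. A standalone sketch of the same idea, independent of nebu; the class and model names here are made up:

```python
# Standalone sketch: recovering a concrete type argument via __orig_class__.
from typing import Generic, TypeVar, get_args

from pydantic import BaseModel

InputT = TypeVar("InputT", bound=BaseModel)
OutputT = TypeVar("OutputT", bound=BaseModel)


class EchoIn(BaseModel):   # hypothetical input model
    text: str


class EchoOut(BaseModel):  # hypothetical output model
    text: str


class Box(Generic[InputT, OutputT]):
    """Stand-in for a generic processor; only the inference logic is shown."""

    def output_model(self) -> type | None:
        # __orig_class__ exists only if the instance was created through a
        # subscripted alias such as Box[EchoIn, EchoOut]().
        orig = getattr(self, "__orig_class__", None)
        if orig is None:
            return None
        args = get_args(orig)  # -> (EchoIn, EchoOut) for Box[EchoIn, EchoOut]()
        if len(args) == 2 and isinstance(args[1], type) and issubclass(args[1], BaseModel):
            return args[1]  # the OutputType half of the pair
        return None


box = Box[EchoIn, EchoOut]()
print(box.output_model())  # <class '__main__.EchoOut'>
```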
@@ -222,12 +246,18 @@ class Processor(Generic[InputType, OutputType]):
         logs: bool = False,
         api_key: Optional[str] = None,
         user_key: Optional[str] = None,
+        timeout: Optional[float] = 600.0,
     ) -> OutputType | Dict[str, Any] | None:
         """
         Allows the Processor instance to be called like a function, sending data.
         """
         return self.send(
-            data=data,
+            data=data,
+            wait=wait,
+            logs=logs,
+            api_key=api_key,
+            user_key=user_key,
+            timeout=timeout,
         )
 
     def send(
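__call__ gains a timeout parameter (default 600.0 seconds) and now forwards wait, logs, api_key, user_key, and timeout to send() instead of passing data alone, so calling the instance like a function behaves the same as an explicit send(). A hypothetical usage line, assuming proc is a parameterized Processor instance and my_input is its Pydantic input model:

```python
# Hypothetical usage; `proc` and `my_input` are placeholders, not nebu objects.
result = proc(data=my_input, wait=True, logs=False, timeout=120.0)
```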
@@ -271,7 +301,9 @@ class Processor(Generic[InputType, OutputType]):
             timeout=timeout,
         )
         response.raise_for_status()
-
+        raw_response_json = response.json()
+        raw_content = raw_response_json.get("content")
+        logger.debug(f">>> Raw content: {raw_content}")
 
         # --- Fetch Logs (if requested and not already running) ---
         if logs:
@@ -297,7 +329,33 @@ class Processor(Generic[InputType, OutputType]):
         else:
             logger.info(f"Log fetching is already running for {processor_name}.")
 
-
+        # Attempt to parse into OutputType if conditions are met
+        if (
+            wait
+            and self.schema_
+            and isinstance(self.schema_, type)
+            and issubclass(self.schema_, BaseModel)  # type: ignore
+            and isinstance(raw_content, dict)
+        ):  # Check if raw_content is a dict
+            try:
+                # self.schema_ is assumed to be the Pydantic model class for OutputType
+                # Parse raw_content instead of the full response
+                parsed_model = self.schema_.model_validate(raw_content)
+                # Cast to OutputType to satisfy the linter with generics
+                parsed_output: OutputType = cast(OutputType, parsed_model)
+                return parsed_output
+            except (
+                Exception
+            ) as e:  # Consider pydantic.ValidationError for more specific handling
+                schema_name = getattr(self.schema_, "__name__", str(self.schema_))
+                logger.error(
+                    f"Processor {processor_name}: Failed to parse 'content' field into output type {schema_name}. "
+                    f"Error: {e}. Returning raw JSON response."
+                )
+                # Fallback to returning the raw JSON response
+                return raw_content
+
+        return raw_content
 
     def scale(self, replicas: int) -> Dict[str, Any]:
         """
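The pattern added here is parse-or-fall-back: when the caller waited for a result and an output schema is known, the "content" field of the JSON response is validated into that Pydantic model; otherwise, or on validation failure, the raw content is returned unchanged. A condensed, standalone sketch of the same pattern (names are illustrative, not nebu's), narrowed to pydantic.ValidationError as the original's own comment suggests:

```python
# Condensed sketch of the parse-or-fall-back pattern shown above.
import logging
from typing import Any, Optional, Type, TypeVar, Union, cast

from pydantic import BaseModel, ValidationError

logger = logging.getLogger(__name__)
T = TypeVar("T", bound=BaseModel)


def parse_content(raw_content: Any, schema: Optional[Type[T]]) -> Union[T, Any]:
    # Only attempt parsing when a concrete model class and a dict payload exist.
    if not (isinstance(schema, type) and issubclass(schema, BaseModel)
            and isinstance(raw_content, dict)):
        return raw_content
    try:
        return cast(T, schema.model_validate(raw_content))
    except ValidationError as e:
        logger.error("Failed to parse content into %s: %s", schema.__name__, e)
        return raw_content  # fall back to the raw JSON content
```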
{nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/RECORD
CHANGED
@@ -2,7 +2,7 @@ nebu/__init__.py,sha256=xNtWiN29MJZK_WBEUP-9hDmlkfLxoASVI-f4tNTXO58,454
 nebu/auth.py,sha256=N_v6SPFD9HU_UoRDTaouH03g2Hmo9C-xxqInE1FweXE,1471
 nebu/cache.py,sha256=JqRb4FdZrRrO4ePlwvsKdxRC8dNEFMxfTWag0aJz8Gw,4893
 nebu/config.py,sha256=C5Jt9Bd0i0HrgzBSVNJ-Ml3KwX_gaYbYYZEtNL2gvJg,7031
-nebu/data.py,sha256=
+nebu/data.py,sha256=v29F1Dwfa3cw6HzG2C6bxAfAJIu16k5PwZCC_niLwCU,63508
 nebu/errors.py,sha256=bBnK5YQ6qZg4OMY81AN2k03ppefg89FUwF_SHEMlqCA,170
 nebu/logging.py,sha256=VzpjCEoXm3c4i0sKJL5GTsPIhTQ6Y4BPUTzPmwhve7o,950
 nebu/meta.py,sha256=CzFHMND9seuewzq9zNNx9WTr6JvrCBExe7BLqDSr7lM,745
@@ -18,11 +18,11 @@ nebu/processors/consumer_process_worker.py,sha256=h--eNFKaLbUayxn88mB8oGGdrU2liE
 nebu/processors/decorate.py,sha256=AfHVCoNbW7RymccF5ewleEL-GlMiqVH1-t9bCmD60rk,58654
 nebu/processors/default.py,sha256=cy4ETMdbdRGkrvbYec1o60h7mGDlGN5JsuUph0ENtDU,364
 nebu/processors/models.py,sha256=g4B1t6Rgoy-NUEHBLeQc0EENzHXLDlWSio8Muv7cTDU,4093
-nebu/processors/processor.py,sha256=
+nebu/processors/processor.py,sha256=PeJvxBPfjQ8uFqLcrGNk28yqBmqZC2VW4ssCsanZxYY,21985
 nebu/redis/models.py,sha256=coPovAcVXnOU1Xh_fpJL4PO3QctgK9nBe5QYoqEcnxg,1230
 nebu/services/service.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
-nebu-0.1.
+nebu-0.1.108.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+nebu-0.1.108.dist-info/METADATA,sha256=drUdR1bXazQ1Yqlns8I1KPOt1qnoO6OhNQMAlDwaK9o,1798
+nebu-0.1.108.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+nebu-0.1.108.dist-info/top_level.txt,sha256=uLIbEKJeGSHWOAJN5S0i5XBGwybALlF9bYoB1UhdEgQ,5
+nebu-0.1.108.dist-info/RECORD,,
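For reference, each RECORD line has the form path,sha256=<digest>,<size>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with trailing "=" padding stripped, and the RECORD file itself is listed with empty hash and size (as in the last line above). A short sketch of how such an entry is computed; the function name is illustrative:

```python
# Sketch: computing a wheel RECORD-style entry for a file on disk.
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode('ascii')},{len(data)}"


# e.g. record_entry("nebu/data.py") -> "nebu/data.py,sha256=v29F1D...,63508"
```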
{nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/WHEEL
File without changes
{nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/licenses/LICENSE
File without changes
{nebu-0.1.107.dist-info → nebu-0.1.108.dist-info}/top_level.txt
File without changes