speedy-utils 1.1.23__py3-none-any.whl → 1.1.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_utils/__init__.py +12 -8
- llm_utils/chat_format/__init__.py +2 -0
- llm_utils/chat_format/display.py +115 -44
- llm_utils/lm/__init__.py +14 -6
- llm_utils/lm/llm.py +413 -0
- llm_utils/lm/llm_signature.py +35 -0
- llm_utils/lm/mixins.py +379 -0
- llm_utils/lm/openai_memoize.py +18 -7
- llm_utils/lm/signature.py +26 -37
- llm_utils/lm/utils.py +61 -76
- speedy_utils/__init__.py +28 -1
- speedy_utils/all.py +30 -1
- speedy_utils/common/utils_io.py +36 -26
- speedy_utils/common/utils_misc.py +25 -1
- speedy_utils/multi_worker/thread.py +145 -58
- {speedy_utils-1.1.23.dist-info → speedy_utils-1.1.24.dist-info}/METADATA +1 -1
- {speedy_utils-1.1.23.dist-info → speedy_utils-1.1.24.dist-info}/RECORD +19 -18
- llm_utils/lm/llm_as_a_judge.py +0 -390
- llm_utils/lm/llm_task.py +0 -614
- {speedy_utils-1.1.23.dist-info → speedy_utils-1.1.24.dist-info}/WHEEL +0 -0
- {speedy_utils-1.1.23.dist-info → speedy_utils-1.1.24.dist-info}/entry_points.txt +0 -0
speedy_utils/multi_worker/thread.py

@@ -80,8 +80,10 @@
 
 import ctypes
 import os
+import sys
 import threading
 import time
+import traceback
 from collections.abc import Callable, Iterable, Mapping, Sequence
 from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
 from heapq import heappop, heappush
@@ -99,12 +101,42 @@ except ImportError: # pragma: no cover
 # Sensible defaults
 DEFAULT_WORKERS = (os.cpu_count() or 4) * 2
 
-T = TypeVar(
-R = TypeVar(
+T = TypeVar("T")
+R = TypeVar("R")
 
 SPEEDY_RUNNING_THREADS: list[threading.Thread] = []  # cooperative shutdown tracking
 _SPEEDY_THREADS_LOCK = threading.Lock()
 
+
+class UserFunctionError(Exception):
+    """Exception wrapper that highlights user function errors."""
+
+    def __init__(
+        self,
+        original_exception: Exception,
+        func_name: str,
+        input_value: Any,
+        user_traceback: list[traceback.FrameSummary],
+    ) -> None:
+        self.original_exception = original_exception
+        self.func_name = func_name
+        self.input_value = input_value
+        self.user_traceback = user_traceback
+
+        # Create a focused error message
+        tb_str = "".join(traceback.format_list(user_traceback))
+        msg = (
+            f'\nError in function "{func_name}" with input: {input_value!r}\n'
+            f"\nUser code traceback:\n{tb_str}"
+            f"{type(original_exception).__name__}: {original_exception}"
+        )
+        super().__init__(msg)
+
+    def __str__(self) -> str:
+        # Return focused error without infrastructure frames
+        return super().__str__()
+
+
 _PY_SET_ASYNC_EXC = ctypes.pythonapi.PyThreadState_SetAsyncExc
 try:
     _PY_SET_ASYNC_EXC.argtypes = (ctypes.c_ulong, ctypes.py_object)  # type: ignore[attr-defined]
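The hunk above adds `UserFunctionError`, which builds its message from the failing input plus only the captured user-code frames. A small illustrative sketch (not part of the diff) of how the wrapper behaves, assuming it is importable from `speedy_utils.multi_worker.thread` as the updated `__all__` later in this diff suggests:

```python
# Hypothetical usage of the new UserFunctionError wrapper (illustration only).
import traceback
from speedy_utils.multi_worker.thread import UserFunctionError

def parse_price(raw: str) -> float:
    return float(raw)  # ValueError for non-numeric input

try:
    parse_price("N/A")
except Exception as exc:
    frames = traceback.extract_tb(exc.__traceback__)
    # The message embeds the function name, the failing input, and the frames above.
    print(UserFunctionError(exc, "parse_price", "N/A", frames))
```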
@@ -133,7 +165,7 @@ def _track_threads(threads: Iterable[threading.Thread]) -> None:
 
 
 def _track_executor_threads(pool: ThreadPoolExecutor) -> None:
-    thread_set = getattr(pool,
+    thread_set = getattr(pool, "_threads", None)
     if not thread_set:
         return
     _track_threads(tuple(thread_set))
@@ -152,7 +184,48 @@ def _worker(
     fixed_kwargs: Mapping[str, Any],
 ) -> R:
     """Execute the function with an item and fixed kwargs."""
-
+    # Validate func is callable before attempting to call it
+    if not callable(func):
+        func_type = type(func).__name__
+        raise TypeError(
+            f"\nmulti_thread: func parameter must be callable, "
+            f"got {func_type}: {func!r}\n"
+            f"Hint: Did you accidentally pass a {func_type} instead of a function?"
+        )
+
+    try:
+        return func(item, **fixed_kwargs)
+    except Exception as exc:
+        # Extract user code traceback (filter out infrastructure)
+        exc_tb = sys.exc_info()[2]
+
+        if exc_tb is not None:
+            tb_list = traceback.extract_tb(exc_tb)
+
+            # Filter to keep only user code frames
+            user_frames = []
+            skip_patterns = [
+                "multi_worker/thread.py",
+                "concurrent/futures/",
+                "threading.py",
+            ]
+
+            for frame in tb_list:
+                if not any(pattern in frame.filename for pattern in skip_patterns):
+                    user_frames.append(frame)
+
+            # If we have user frames, wrap in our custom exception
+            if user_frames:
+                func_name = getattr(func, "__name__", repr(func))
+                raise UserFunctionError(
+                    exc,
+                    func_name,
+                    item,
+                    user_frames,
+                ) from exc
+
+        # Fallback: re-raise original if we couldn't extract frames
+        raise
 
 
 def _run_batch(
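The new `_worker` body above filters infrastructure frames out of the traceback before wrapping the error. The same idea in a standalone, stdlib-only sketch (names here are illustrative, not part of the package):

```python
# Keep only traceback frames whose file paths don't look like worker/executor internals.
import traceback

SKIP_PATTERNS = ["concurrent/futures/", "threading.py"]

def user_frames_of(exc: BaseException) -> list[traceback.FrameSummary]:
    frames = traceback.extract_tb(exc.__traceback__)
    return [f for f in frames if not any(p in f.filename for p in SKIP_PATTERNS)]

try:
    int("not a number")
except ValueError as exc:
    print("".join(traceback.format_list(user_frames_of(exc))))
```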
@@ -164,14 +237,14 @@ def _run_batch(
 
 
 def _attach_metadata(fut: Future[Any], idx: int, logical_size: int) -> None:
-    setattr(fut,
-    setattr(fut,
+    setattr(fut, "_speedy_idx", idx)
+    setattr(fut, "_speedy_size", logical_size)
 
 
 def _future_meta(fut: Future[Any]) -> tuple[int, int]:
     return (
-        getattr(fut,
-        getattr(fut,
+        getattr(fut, "_speedy_idx"),
+        getattr(fut, "_speedy_size"),
     )
 
 
@@ -219,7 +292,7 @@ def _resolve_worker_count(workers: int | None) -> int:
     if workers is None:
         return DEFAULT_WORKERS
     if workers <= 0:
-        raise ValueError(
+        raise ValueError("workers must be a positive integer")
     return workers
 
 
@@ -227,18 +300,18 @@ def _normalize_batch_result(result: Any, logical_size: int) -> list[Any]:
     if logical_size == 1:
         return [result]
     if result is None:
-        raise ValueError(
+        raise ValueError("batched callable returned None for a batch result")
     if isinstance(result, (str, bytes, bytearray)):
-        raise TypeError(
+        raise TypeError("batched callable must not return str/bytes when batching")
     if isinstance(result, Sequence):
         out = list(result)
     elif isinstance(result, Iterable):
         out = list(result)
     else:
-        raise TypeError(
+        raise TypeError("batched callable must return an iterable of results")
     if len(out) != logical_size:
         raise ValueError(
-            f
+            f"batched callable returned {len(out)} items, expected {logical_size}",
         )
     return out
 
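For reference, the contract `_normalize_batch_result` enforces above can be sketched in isolation: a batched callable must return one result per logical input, and a bare `str`/`bytes` return is rejected (illustrative code, not the packaged implementation):

```python
def normalize(result, logical_size):
    # Single logical item: wrap the raw result as-is.
    if logical_size == 1:
        return [result]
    if isinstance(result, (str, bytes, bytearray)):
        raise TypeError("must not return str/bytes when batching")
    out = list(result)
    if len(out) != logical_size:
        raise ValueError(f"returned {len(out)} items, expected {logical_size}")
    return out

print(normalize("abc", 1))      # ['abc']
print(normalize([1, 4, 9], 3))  # [1, 4, 9]
```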
@@ -325,7 +398,7 @@ def multi_thread(
         results: list[R | None] = []
 
         for proc_idx, chunk in enumerate(chunks):
-            with tempfile.NamedTemporaryFile(delete=False, suffix=
+            with tempfile.NamedTemporaryFile(delete=False, suffix="multi_thread.pkl") as fh:
                 file_pkl = fh.name
             assert isinstance(in_process_multi_thread, Callable)
             proc = in_process_multi_thread(
@@ -347,28 +420,28 @@
 
         for proc, file_pkl in procs:
             proc.join()
-            logger.info(
+            logger.info("process finished: %s", proc)
             try:
                 results.extend(load_by_ext(file_pkl))
             finally:
                 try:
                     os.unlink(file_pkl)
                 except OSError as exc:  # pragma: no cover - best effort cleanup
-                    logger.warning(
+                    logger.warning("failed to remove temp file %s: %s", file_pkl, exc)
         return results
 
     try:
         import pandas as pd
 
         if isinstance(inputs, pd.DataFrame):
-            inputs = cast(Iterable[T], inputs.to_dict(orient=
+            inputs = cast(Iterable[T], inputs.to_dict(orient="records"))
     except ImportError:  # pragma: no cover - optional dependency
         pass
 
     if batch <= 0:
-        raise ValueError(
+        raise ValueError("batch must be a positive integer")
     if prefetch_factor <= 0:
-        raise ValueError(
+        raise ValueError("prefetch_factor must be a positive integer")
 
     workers_val = _resolve_worker_count(workers)
     progress_update = max(progress_update, 1)
@@ -390,20 +463,12 @@ def multi_thread(
 
     bar = None
     last_bar_update = 0
-    if (
-        progress
-        and tqdm is not None
-        and logical_total is not None
-        and logical_total > 0
-    ):
+    if progress and tqdm is not None and logical_total is not None and logical_total > 0:
         bar = tqdm(
             total=logical_total,
             ncols=128,
-            colour=
-            bar_format=(
-                '{l_bar}{bar}| {n_fmt}/{total_fmt}'
-                ' [{elapsed}<{remaining}, {rate_fmt}{postfix}]'
-            ),
+            colour="green",
+            bar_format=("{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt}{postfix}]"),
         )
 
     deadline = time.monotonic() + timeout if timeout is not None else None
@@ -417,11 +482,12 @@ def multi_thread(
     inflight: set[Future[Any]] = set()
     pool = ThreadPoolExecutor(
         max_workers=workers_val,
-        thread_name_prefix=
+        thread_name_prefix="speedy-thread",
     )
-    shutdown_kwargs: dict[str, Any] = {
+    shutdown_kwargs: dict[str, Any] = {"wait": True}
 
     try:
+
         def submit_arg(arg: Any) -> None:
             nonlocal next_logical_idx
             if batch > 1:
@@ -451,7 +517,7 @@ def multi_thread(
             if remaining <= 0:
                 _cancel_futures(inflight)
                 raise TimeoutError(
-                    f
+                    f"multi_thread timed out after {timeout} seconds",
                 )
             wait_timeout = max(remaining, 0.0)
 
@@ -464,7 +530,7 @@ def multi_thread(
             if not done:
                 _cancel_futures(inflight)
                 raise TimeoutError(
-                    f
+                    f"multi_thread timed out after {timeout} seconds",
                 )
 
             for fut in done:
@@ -472,11 +538,37 @@ def multi_thread(
                 idx, logical_size = _future_meta(fut)
                 try:
                     result = fut.result()
+                except UserFunctionError as exc:
+                    # User function error - already has clean traceback
+                    logger.error(str(exc))
+
+                    if stop_on_error:
+                        _cancel_futures(inflight)
+                        # Create a clean exception without infrastructure frames
+                        # by re-creating the traceback
+                        orig_exc = exc.original_exception
+
+                        # Build new traceback from user frames only
+                        tb_str = "".join(traceback.format_list(exc.user_traceback))
+                        clean_msg = (
+                            f'\nError in "{exc.func_name}" '
+                            f"with input: {exc.input_value!r}\n\n{tb_str}"
+                            f"{type(orig_exc).__name__}: {orig_exc}"
+                        )
+
+                        # Raise a new instance of the original exception type
+                        # with our clean message
+                        new_exc = type(orig_exc)(clean_msg)
+                        # Suppress the "from" chain to avoid showing infrastructure
+                        raise new_exc from None
+
+                    out_items = [None] * logical_size
                 except Exception as exc:
+                    # Other errors (infrastructure, batching, etc.)
                     if stop_on_error:
                         _cancel_futures(inflight)
                         raise
-                    logger.exception(
+                    logger.exception("multi_thread task failed", exc_info=exc)
                     out_items = [None] * logical_size
                 else:
                     try:
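The `stop_on_error` branch above re-raises with the pattern `raise type(original)(clean_message) from None`, so callers see the original exception type carrying a rewritten message and no infrastructure chain. A minimal sketch of that pattern on its own (illustrative only):

```python
def reraise_clean(exc: BaseException, message: str) -> None:
    # New instance of the same exception type; "from None" hides the original chain.
    raise type(exc)(message) from None

try:
    try:
        {}["missing"]
    except KeyError as exc:
        reraise_clean(exc, "lookup failed for 'missing' (user frames only)")
except KeyError as clean:
    print(clean)
```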
@@ -484,7 +576,7 @@ def multi_thread(
                     except Exception as exc:
                         _cancel_futures(inflight)
                         raise RuntimeError(
-
+                            "batched callable returned an unexpected shape",
                         ) from exc
 
                     collector.add(idx, out_items)
@@ -496,14 +588,10 @@ def multi_thread(
                     bar.update(delta)
                     last_bar_update = completed_items
                 submitted = next_logical_idx
-                pending = (
-                    max(logical_total - submitted, 0)
-                    if logical_total is not None
-                    else '-'
-                )
+                pending = max(logical_total - submitted, 0) if logical_total is not None else "-"
                 postfix = {
-
-
+                    "processing": min(len(inflight), workers_val),
+                    "pending": pending,
                 }
                 bar.set_postfix(postfix)
 
@@ -516,7 +604,7 @@ def multi_thread(
             results = collector.finalize()
 
     except KeyboardInterrupt:
-        shutdown_kwargs = {
+        shutdown_kwargs = {"wait": False, "cancel_futures": True}
        _cancel_futures(inflight)
        kill_all_thread(SystemExit)
        raise KeyboardInterrupt() from None
@@ -524,29 +612,27 @@ def multi_thread(
         try:
             pool.shutdown(**shutdown_kwargs)
         except TypeError:  # pragma: no cover - Python <3.9 fallback
-            pool.shutdown(shutdown_kwargs.get(
+            pool.shutdown(shutdown_kwargs.get("wait", True))
         if bar:
             delta = completed_items - last_bar_update
             if delta > 0:
                 bar.update(delta)
             bar.close()
 
-        results = collector.finalize() if
+        results = collector.finalize() if "results" not in locals() else results
         if store_output_pkl_file:
             dump_json_or_pickle(results, store_output_pkl_file)
         _prune_dead_threads()
         return results
 
 
-def multi_thread_standard(
-    fn: Callable[[T], R], items: Iterable[T], workers: int = 4
-) -> list[R]:
+def multi_thread_standard(fn: Callable[[T], R], items: Iterable[T], workers: int = 4) -> list[R]:
     """Execute ``fn`` across ``items`` while preserving submission order."""
 
     workers_val = _resolve_worker_count(workers)
     with ThreadPoolExecutor(
         max_workers=workers_val,
-        thread_name_prefix=
+        thread_name_prefix="speedy-thread",
     ) as executor:
         futures: list[Future[R]] = []
         for item in items:
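The reflowed one-line signature above makes the helper's shape explicit: `multi_thread_standard(fn, items, workers=4)` returns results in submission order. A usage sketch, assuming the function is importable from `speedy_utils.multi_worker.thread`:

```python
from speedy_utils.multi_worker.thread import multi_thread_standard

def double(x: int) -> int:
    return x * 2

# Output order matches `items`, regardless of which thread finished first.
print(multi_thread_standard(double, range(5), workers=2))  # [0, 2, 4, 6, 8]
```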
@@ -561,13 +647,13 @@ def _async_raise(thread_id: int, exc_type: type[BaseException]) -> bool:
     if thread_id <= 0:
         return False
     if not issubclass(exc_type, BaseException):
-        raise TypeError(
+        raise TypeError("exc_type must derive from BaseException")
     res = _PY_SET_ASYNC_EXC(ctypes.c_ulong(thread_id), ctypes.py_object(exc_type))
     if res == 0:
         return False
     if res > 1:  # pragma: no cover - defensive branch
         _PY_SET_ASYNC_EXC(ctypes.c_ulong(thread_id), None)
-        raise SystemError(
+        raise SystemError("PyThreadState_SetAsyncExc failed")
     return True
 
 
@@ -596,16 +682,17 @@ def kill_all_thread(exc_type: type[BaseException] = SystemExit, join_timeout: fl
                 terminated += 1
                 thread.join(timeout=join_timeout)
             else:
-                logger.warning(
+                logger.warning("Unable to signal thread %s", thread.name)
         except Exception as exc:  # pragma: no cover - defensive
-            logger.error(
+            logger.error("Failed to stop thread %s: %s", thread.name, exc)
     _prune_dead_threads()
     return terminated
 
 
 __all__ = [
-
-
-
-
+    "SPEEDY_RUNNING_THREADS",
+    "UserFunctionError",
+    "multi_thread",
+    "multi_thread_standard",
+    "kill_all_thread",
 ]
speedy_utils-1.1.24.dist-info/RECORD

@@ -1,17 +1,18 @@
-llm_utils/__init__.py,sha256=
+llm_utils/__init__.py,sha256=pbnOQddU5KnhP8uqMqN9E87BeDeCxFrgta2m2P89LmM,1591
 llm_utils/group_messages.py,sha256=Oe2tlhg-zRodG1-hodYebddrR77j9UdE05LzJw0EvYI,3622
-llm_utils/chat_format/__init__.py,sha256=
-llm_utils/chat_format/display.py,sha256=
+llm_utils/chat_format/__init__.py,sha256=MCNT8o-BZWmoOFE5VLyhJJOqHg8lJGqHXEKSXU08fK0,775
+llm_utils/chat_format/display.py,sha256=HiAOAC8FY7956gNuwE7rxii1MCCebn0avbXi1iIcDSc,17178
 llm_utils/chat_format/transform.py,sha256=eU0c3PdAHCNLuGP1UqPwln0B34Lv3bt_uV9v9BrlCN4,5402
 llm_utils/chat_format/utils.py,sha256=xTxN4HrLHcRO2PfCTR43nH1M5zCa7v0kTTdzAcGkZg0,1229
-llm_utils/lm/__init__.py,sha256=
+llm_utils/lm/__init__.py,sha256=FBe8wVNWDMpvJ2kQYedJ3HH5L2BCAZBQVE0zEjND0Vo,729
 llm_utils/lm/base_prompt_builder.py,sha256=OLqyxbA8QeYIVFzB9EqxUiE_P2p4_MD_Lq4WSwxFtKU,12136
-llm_utils/lm/
-llm_utils/lm/
+llm_utils/lm/llm.py,sha256=uk45JhVcWDMaqezn9Yn_K5hehFSmQ4txU901fn_PcQg,16262
+llm_utils/lm/llm_signature.py,sha256=SP72cWXaVGcZs3m2V361DcLk_St7aYJamNapUiFBB6Q,1242
 llm_utils/lm/lm_base.py,sha256=pqbHZOdR7yUMpvwt8uBG1dZnt76SY_Wk8BkXQQ-mpWs,9557
-llm_utils/lm/
-llm_utils/lm/
-llm_utils/lm/
+llm_utils/lm/mixins.py,sha256=Sn5KyPKGCT_HVJmmosmy3XSlZ0_k5Kds0VvSJqeUDpI,13695
+llm_utils/lm/openai_memoize.py,sha256=PDs3YCXKgHXaHlegkhouzPtf2Gom_o7pvzChCT-NQyQ,3870
+llm_utils/lm/signature.py,sha256=16QOHnGc-p7H8rR3j1dPg8AokdV_rEGUYCGGkIHIghE,10240
+llm_utils/lm/utils.py,sha256=oiJ50b8WV6oktnW4BByr1gRaGc55VJeF3IyhHqoofp4,12193
 llm_utils/lm/async_lm/__init__.py,sha256=PUBbCuf5u6-0GBUu-2PI6YAguzsyXj-LPkU6vccqT6E,121
 llm_utils/lm/async_lm/_utils.py,sha256=P1-pUDf_0pDmo8WTIi43t5ARlyGA1RIJfpAhz-gfA5g,6105
 llm_utils/lm/async_lm/async_llm_task.py,sha256=-BVOk18ZD8eC2obTLgiPq39f2PP3cji17Ku-Gb7c7Xo,18683
@@ -26,8 +27,8 @@ llm_utils/vector_cache/cli.py,sha256=DMXTj8nZ2_LRjprbYPb4uzq04qZtOfBbmblmaqDcCuM
 llm_utils/vector_cache/core.py,sha256=J8ocRX9sBfzboQkf5vFF2cx0SK-nftmKWJUa91WUBy8,31134
 llm_utils/vector_cache/types.py,sha256=ru8qmUZ8_lNd3_oYpjCMtpXTsqmwsSBe56Z4hTWm3xI,435
 llm_utils/vector_cache/utils.py,sha256=dwbbXlRrARrpmS4YqSlYQqrTURg0UWe8XvaAWcX05MM,1458
-speedy_utils/__init__.py,sha256=
-speedy_utils/all.py,sha256=
+speedy_utils/__init__.py,sha256=LFV2La5TaYkXbe5xVQ9xiDqsjcjcF0QUyg58jI8UMfI,6081
+speedy_utils/all.py,sha256=gXXRlBLvU8AON7XqO6iFQ8LCIQEIcP_2CDumd_U1ppI,5171
 speedy_utils/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 speedy_utils/common/clock.py,sha256=3n4FkCW0dz46O8By09V5Pve1DSMgpLDRbWEVRryryeQ,7423
 speedy_utils/common/function_decorator.py,sha256=BspJ0YuGL6elS7lWBAgELZ-sCfED_1N2P5fgH-fCRUQ,2132
@@ -36,16 +37,16 @@ speedy_utils/common/notebook_utils.py,sha256=-97kehJ_Gg3TzDLubsLIYJcykqX1NXhbvBO
 speedy_utils/common/patcher.py,sha256=VCmdxyTF87qroggQkQklRPhAOPJbeBqhcJoTsLcDxNw,2303
 speedy_utils/common/report_manager.py,sha256=eBiw5KY6bWUhwki3B4lK5o8bFsp7L5x28X9GCI-Sd1w,3899
 speedy_utils/common/utils_cache.py,sha256=NCwILnhsK86sDPkkriDTCyuM-qUKFxYOo1Piww1ED0g,22381
-speedy_utils/common/utils_io.py,sha256
-speedy_utils/common/utils_misc.py,sha256=
+speedy_utils/common/utils_io.py,sha256=E7mbxB_OpLvNWoFM2Qpxi1jaD8VwF-tvNOpGbf7swuU,14849
+speedy_utils/common/utils_misc.py,sha256=yYlyP0eXQuapY1dn5O8-UDePPq5bb6FxKFjb1kfZy5o,2354
 speedy_utils/common/utils_print.py,sha256=syRrnSFtguxrV-elx6DDVcSGu4Qy7D_xVNZhPwbUY4A,4864
 speedy_utils/multi_worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 speedy_utils/multi_worker/process.py,sha256=RGGGnbZXCbEbdmxFVmnNfyccClAlflzRPE0d1C3CeeE,11385
-speedy_utils/multi_worker/thread.py,sha256=
+speedy_utils/multi_worker/thread.py,sha256=bRjxUHkBjbXHQ2KSsf-Zao28zbSId-8mqMFHwSG1l1s,25206
 speedy_utils/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 speedy_utils/scripts/mpython.py,sha256=IvywP7Y0_V6tWfMP-4MjPvN5_KfxWF21xaLJsCIayCk,3821
 speedy_utils/scripts/openapi_client_codegen.py,sha256=f2125S_q0PILgH5dyzoKRz7pIvNEjCkzpi4Q4pPFRZE,9683
-speedy_utils-1.1.
-speedy_utils-1.1.
-speedy_utils-1.1.
-speedy_utils-1.1.
+speedy_utils-1.1.24.dist-info/METADATA,sha256=3KOAmdRLEkW8wXkjAIZwvurxWeTKGcNyRg-oHhDRpBA,8028
+speedy_utils-1.1.24.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+speedy_utils-1.1.24.dist-info/entry_points.txt,sha256=1rrFMfqvaMUE9hvwGiD6vnVh98kmgy0TARBj-v0Lfhs,244
+speedy_utils-1.1.24.dist-info/RECORD,,