speedy-utils 1.1.43__py3-none-any.whl → 1.1.45__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
llm_utils/lm/llm.py CHANGED
@@ -16,6 +16,7 @@ from openai.types.chat import ChatCompletionMessageParam
  from pydantic import BaseModel

  from speedy_utils.common.utils_io import jdumps
+ from speedy_utils import clean_traceback

  from .base_prompt_builder import BasePromptBuilder
  from .mixins import (
@@ -159,6 +160,7 @@ class LLM(
  messages.append({'role': 'user', 'content': user_content})
  return cast(Messages, messages)

+ @clean_traceback
  def text_completion(
  self, input_data: str | BaseModel | list[dict], **runtime_kwargs
  ) -> list[dict[str, Any]]:
@@ -214,6 +216,7 @@ class LLM(
  results.append(result_dict)
  return results

+ @clean_traceback
  def pydantic_parse(
  self,
  input_data: str | BaseModel | list[dict],
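The practical effect of the two new `@clean_traceback` decorators is that exceptions escaping `LLM.text_completion` and `LLM.pydantic_parse` are no longer surfaced as raw stack traces: the decorator (added below in `speedy_utils/common/utils_error.py`) filters out library frames, prints a rich-formatted traceback, and exits. A minimal sketch of the pattern on a hypothetical stand-in class (not the package's actual `LLM` API):

```python
from speedy_utils import clean_traceback


class ToyClient:
    """Hypothetical stand-in for llm_utils.lm.llm.LLM, for illustration only."""

    @clean_traceback
    def text_completion(self, input_data):
        # Any exception raised here is intercepted by the decorator:
        # infrastructure frames (site-packages, httpx/openai, speedy_utils
        # internals) are filtered out, the remaining user-code frames are
        # printed with code context, and the process exits with code 1.
        raise ValueError(f'cannot complete: {input_data!r}')


ToyClient().text_completion('broken prompt')  # prints a clean traceback, then exits
```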
speedy_utils/__init__.py CHANGED
@@ -58,6 +58,9 @@ from .common.utils_print import (
  fprint,
  )

+ # Error handling utilities
+ from .common.utils_error import clean_traceback, handle_exceptions_with_clean_traceback
+
  # Multi-worker processing
  from .multi_worker.process import multi_process
  from .multi_worker.thread import kill_all_thread, multi_thread
@@ -156,6 +159,9 @@ __all__ = [
  'print_table',
  'setup_logger',
  'log',
+ # Error handling utilities
+ 'clean_traceback',
+ 'handle_exceptions_with_clean_traceback',
  # Multi-worker processing
  'multi_process',
  'multi_thread',
speedy_utils/common/utils_error.py ADDED
@@ -0,0 +1,261 @@
+ """
+ Error handling utilities for clean, user-focused tracebacks.
+ """
+
+ import inspect
+ import linecache
+ import sys
+ import traceback
+ from typing import Any, Callable, TypeVar
+
+ try:
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.text import Text
+ except ImportError:
+ Console = None
+ Panel = None
+ Text = None
+
+ F = TypeVar('F', bound=Callable[..., Any])
+
+
+ class CleanTracebackError(Exception):
+ """Exception wrapper that provides clean, user-focused tracebacks."""
+
+ def __init__(
+ self,
+ original_exception: Exception,
+ user_traceback: list[traceback.FrameSummary],
+ caller_frame: traceback.FrameSummary | None = None,
+ func_name: str | None = None,
+ ) -> None:
+ self.original_exception = original_exception
+ self.user_traceback = user_traceback
+ self.caller_frame = caller_frame
+ self.func_name = func_name
+
+ # Create a focused error message
+ tb_str = ''.join(traceback.format_list(user_traceback))
+ func_part = f' in {func_name}' if func_name else ''
+ msg = (
+ f'Error{func_part}:\n'
+ f'\nUser code traceback:\n{tb_str}'
+ f'{type(original_exception).__name__}: {original_exception}'
+ )
+ super().__init__(msg)
+
+ def __str__(self) -> str:
+ return super().__str__()
+
+ def format_rich(self) -> None:
+ """Format and print error with rich panels and code context."""
+ if Console is None or Panel is None or Text is None:
+ print(str(self), file=sys.stderr)
+ return
+
+ console = Console(stderr=True, force_terminal=True)
+
+ # Build traceback display with code context
+ tb_parts: list[str] = []
+
+ # Show caller frame first if available
+ if self.caller_frame and self.caller_frame.lineno is not None:
+ tb_parts.append(
+ f'[cyan]{self.caller_frame.filename}[/cyan]:[yellow]{self.caller_frame.lineno}[/yellow] '
+ f'in [green]{self.caller_frame.name}[/green]'
+ )
+ tb_parts.append('')
+ context = _get_code_context_rich(self.caller_frame.filename, self.caller_frame.lineno, 3)
+ tb_parts.extend(context)
+ tb_parts.append('')
+
+ # Show user code frames with context
+ for frame in self.user_traceback:
+ if frame.lineno is not None:
+ func_name = f' {self.func_name}' if self.func_name else ''
+ tb_parts.append(
+ f'[cyan]{frame.filename}[/cyan]:[yellow]{frame.lineno}[/yellow] '
+ f'in [green]{frame.name}{func_name}[/green]'
+ )
+ tb_parts.append('')
+ context = _get_code_context_rich(frame.filename, frame.lineno, 3)
+ tb_parts.extend(context)
+ tb_parts.append('')
+
+ # Print with rich Panel
+ console.print()
+ console.print(
+ Panel(
+ '\n'.join(tb_parts),
+ title='[bold red]Traceback (most recent call last)[/bold red]',
+ border_style='red',
+ expand=False,
+ )
+ )
+ console.print(
+ f'[bold red]{type(self.original_exception).__name__}[/bold red]: '
+ f'{self.original_exception}'
+ )
+ console.print()
+
+
+ def _get_code_context(filename: str, lineno: int, context_lines: int = 3) -> list[str]:
+ """Get code context around a line with line numbers and highlighting."""
+ lines: list[str] = []
+ start = max(1, lineno - context_lines)
+ end = lineno + context_lines
+
+ for i in range(start, end + 1):
+ line = linecache.getline(filename, i)
+ if not line:
+ continue
+ line = line.rstrip()
+ marker = '❱' if i == lineno else ' '
+ lines.append(f' {i:4d} {marker} {line}')
+
+ return lines
+
+
+ def _get_code_context_rich(filename: str, lineno: int, context_lines: int = 3) -> list[str]:
+ """Get code context with rich formatting (colors)."""
+ lines: list[str] = []
+ start = max(1, lineno - context_lines)
+ end = lineno + context_lines
+
+ for i in range(start, end + 1):
+ line = linecache.getline(filename, i)
+ if not line:
+ continue
+ line = line.rstrip()
+ num_str = f'{i:4d}'
+
+ if i == lineno:
+ # Highlight error line
+ lines.append(f'[dim]{num_str}[/dim] [red]❱[/red] {line}')
+ else:
+ # Normal context line
+ lines.append(f'[dim]{num_str} │[/dim] {line}')
+
+ return lines
+
+
+ def _filter_traceback_frames(tb_list: list[traceback.FrameSummary]) -> list[traceback.FrameSummary]:
+ """Filter traceback frames to show only user code."""
+ user_frames = []
+ skip_patterns = [
+ 'site-packages/',
+ 'dist-packages/',
+ 'python3.',
+ 'lib/python',
+ 'concurrent/futures/',
+ 'threading.py',
+ 'multiprocessing/',
+ 'urllib/',
+ 'httpx/',
+ 'httpcore/',
+ 'openai/',
+ 'requests/',
+ 'aiohttp/',
+ 'urllib3/',
+ 'speedy_utils/common/',
+ 'speedy_utils/multi_worker/',
+ 'llm_utils/lm/',
+ 'llm_utils/chat_format/',
+ 'llm_utils/vector_cache/',
+ ]
+
+ skip_functions = [
+ 'wrapper', # Our decorator wrapper
+ '__call__', # Internal calls
+ '__inner_call__', # Internal calls
+ '_worker', # Multi-thread worker
+ '_run_batch', # Batch runner
+ ]
+
+ for frame in tb_list:
+ # Skip frames matching skip patterns
+ if any(pattern in frame.filename for pattern in skip_patterns):
+ continue
+ # Skip specific function names
+ if frame.name in skip_functions:
+ continue
+ # Skip frames that are too deep in the call stack (internal implementation)
+ if 'speedy_utils' in frame.filename and any(
+ name in frame.name for name in ['__inner_call__', '_worker', '_run_batch']
+ ):
+ continue
+ user_frames.append(frame)
+
+ # If no user frames found, keep frames that are likely user code (not deep library internals)
+ if not user_frames:
+ for frame in reversed(tb_list):
+ # Keep if it's not in site-packages and not in standard library
+ if ('site-packages/' not in frame.filename and
+ 'dist-packages/' not in frame.filename and
+ not frame.filename.startswith('/usr/') and
+ not frame.filename.startswith('/opt/') and
+ 'python3.' not in frame.filename and
+ frame.name not in skip_functions):
+ user_frames.append(frame)
+ if len(user_frames) >= 5: # Keep up to 5 frames
+ break
+ user_frames.reverse()
+
+ return user_frames
+
+
+ def clean_traceback(func: F) -> F:
+ """Decorator to wrap function calls with clean traceback handling."""
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception as exc:
+ # Get the current traceback
+ exc_tb = sys.exc_info()[2]
+
+ if exc_tb is not None:
+ tb_list = traceback.extract_tb(exc_tb)
+
+ # Filter to keep only user code frames
+ user_frames = _filter_traceback_frames(tb_list)
+
+ # Get caller frame - walk up the stack to find the original caller
+ caller_context = None
+ frame = inspect.currentframe()
+ while frame:
+ frame = frame.f_back
+ if frame and frame.f_code.co_name not in ['wrapper', '__call__', '__inner_call__']:
+ caller_info = inspect.getframeinfo(frame)
+ if not any(skip in caller_info.filename for skip in [
+ 'speedy_utils/common/', 'speedy_utils/multi_worker/',
+ 'llm_utils/lm/', 'site-packages/', 'dist-packages/'
+ ]):
+ caller_context = traceback.FrameSummary(
+ caller_info.filename,
+ caller_info.lineno,
+ caller_info.function,
+ )
+ break
+
+ # If we have user frames, create and format our custom exception
+ if user_frames:
+ func_name = getattr(func, '__name__', repr(func))
+ clean_error = CleanTracebackError(
+ exc,
+ user_frames,
+ caller_context,
+ func_name,
+ )
+ clean_error.format_rich()
+ sys.exit(1) # Exit after formatting
+
+ # Fallback: re-raise original if we couldn't extract frames
+ raise
+
+ return wrapper # type: ignore
+
+
+ def handle_exceptions_with_clean_traceback(func: F) -> F:
+ """Alias for clean_traceback decorator."""
+ return clean_traceback(func)
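For orientation, the decorator's observable behavior: when a decorated call raises and at least one user-code frame survives filtering, a rich-formatted traceback (plain text if rich is unavailable) is printed to stderr and the process exits via `sys.exit(1)`, i.e. `SystemExit` is raised; otherwise the original exception propagates unchanged. A small hedged sketch of standalone use (the `load_json` function and path are invented for illustration):

```python
from speedy_utils import handle_exceptions_with_clean_traceback  # alias of clean_traceback


@handle_exceptions_with_clean_traceback
def load_json(path: str) -> dict:
    import json
    with open(path) as f:  # raises FileNotFoundError for a missing file
        return json.load(f)


try:
    load_json('/tmp/does-not-exist.json')
except SystemExit:
    # The decorator has already printed the filtered traceback with code
    # context; catch SystemExit if the surrounding program must keep running.
    print('handled: clean traceback was printed above')
```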
speedy_utils/multi_worker/process.py CHANGED
@@ -115,32 +115,39 @@ class ErrorStats:
  func_name: str,
  ) -> str:
  """Write error details to a log file."""
+ from io import StringIO
+ from rich.console import Console
+
  log_path = self._error_log_dir / f'{idx}.log'

+ output = StringIO()
+ console = Console(file=output, width=120, no_color=False)
+
  # Format traceback
  tb_lines = self._format_traceback(error)

- content = []
- content.append(f'{"=" * 60}')
- content.append(f'Error at index: {idx}')
- content.append(f'Function: {func_name}')
- content.append(f'Error Type: {type(error).__name__}')
- content.append(f'Error Message: {error}')
- content.append(f'{"=" * 60}')
- content.append('')
- content.append('Input:')
- content.append('-' * 40)
+ console.print(f'{"=" * 60}')
+ console.print(f'Error at index: {idx}')
+ console.print(f'Function: {func_name}')
+ console.print(f'Error Type: {type(error).__name__}')
+ console.print(f'Error Message: {error}')
+ console.print(f'{"=" * 60}')
+ console.print('')
+ console.print('Input:')
+ console.print('-' * 40)
  try:
- content.append(repr(input_value))
+ import json
+ console.print(json.dumps(input_value, indent=2))
  except Exception:
- content.append('<unable to repr input>')
- content.append('')
- content.append('Traceback:')
- content.append('-' * 40)
- content.extend(tb_lines)
+ console.print(repr(input_value))
+ console.print('')
+ console.print('Traceback:')
+ console.print('-' * 40)
+ for line in tb_lines:
+ console.print(line)

  with open(log_path, 'w') as f:
- f.write('\n'.join(content))
+ f.write(output.getvalue())

  return str(log_path)

@@ -824,7 +831,7 @@ def multi_process(
  log_worker: Literal['zero', 'first', 'all'] = 'first',
  total_items: int | None = None,
  poll_interval: float = 0.3,
- error_handler: ErrorHandlerType = 'raise',
+ error_handler: ErrorHandlerType = 'log',
  max_error_files: int = 100,
  **func_kwargs: Any,
  ) -> list[Any]:
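Note the behavioral change in the last hunk: `multi_process` now defaults to `error_handler='log'` rather than `'raise'`, so a failing item no longer stops the whole run by default; per the README below, the error is written under `.cache/speedy_utils/error_logs/` and the failed item's result slot comes back as `None`. A hedged sketch of the caller-side difference (the `flaky` function is invented for illustration):

```python
from speedy_utils import multi_process


def flaky(x: int) -> int:
    if x == 2:
        raise RuntimeError('boom')
    return x * 10


# New 1.1.45 default ('log'): the error is logged to a file and the run
# continues, so the failed item is returned as None, e.g. [10, None, 30].
print(multi_process(flaky, [1, 2, 3]))

# To keep the pre-1.1.45 behavior of stopping on the first error, opt back in:
# multi_process(flaky, [1, 2, 3], error_handler='raise')
```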
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: speedy-utils
- Version: 1.1.43
+ Version: 1.1.45
  Summary: Fast and easy-to-use package for data science
  Project-URL: Homepage, https://github.com/anhvth/speedy
  Project-URL: Repository, https://github.com/anhvth/speedy
@@ -58,13 +58,49 @@ Description-Content-Type: text/markdown

  **Speedy Utils** is a Python utility library designed to streamline common programming tasks such as caching, parallel processing, file I/O, and data manipulation. It provides a collection of decorators, functions, and classes to enhance productivity and performance in your Python projects.

+ ## 🚀 Recent Updates (January 27, 2026)
+
+ **Enhanced Error Handling in Parallel Processing:**
+
+ - Rich-formatted error tracebacks with code context and syntax highlighting
+ - Three error handling modes: 'raise', 'ignore', and 'log'
+ - Filtered tracebacks focusing on user code (hiding infrastructure)
+ - Real-time progress reporting with error/success statistics
+ - Automatic error logging to timestamped files
+ - Caller frame information showing where parallel functions were invoked
+
+ ## Quick Start
+
+ ### Parallel Processing with Error Handling
+
+ ```python
+ from speedy_utils import multi_thread, multi_process
+
+ # Simple parallel processing
+ results = multi_thread(lambda x: x * 2, [1, 2, 3, 4, 5])
+ # Results: [2, 4, 6, 8, 10]
+
+ # Robust processing with error handling
+ def process_item(item):
+ if item == 3:
+ raise ValueError(f"Cannot process item {item}")
+ return item * 2
+
+ # Continue processing despite errors
+ results = multi_thread(process_item, [1, 2, 3, 4, 5], error_handler='log')
+ # Results: [2, 4, None, 8, 10] - errors logged automatically
+ ```
+
  ## Table of Contents

+ - [🚀 Recent Updates](#-recent-updates-january-27-2026)
+ - [Quick Start](#quick-start)
  - [Features](#features)
  - [Installation](#installation)
  - [Usage](#usage)
- - [Caching](#caching)
  - [Parallel Processing](#parallel-processing)
+ - [Enhanced Error Handling](#enhanced-error-handling)
+ - [Caching](#caching)
  - [File I/O](#file-io)
  - [Data Manipulation](#data-manipulation)
  - [Utility Functions](#utility-functions)
@@ -73,11 +109,12 @@ Description-Content-Type: text/markdown
  ## Features

  - **Caching Mechanisms**: Disk-based and in-memory caching to optimize function calls.
- - **Parallel Processing**: Multi-threading, multi-processing, and asynchronous multi-threading utilities.
+ - **Parallel Processing**: Multi-threading, multi-processing, and asynchronous multi-threading utilities with enhanced error handling.
  - **File I/O**: Simplified JSON, JSONL, and pickle file handling with support for various file extensions.
  - **Data Manipulation**: Utilities for flattening lists and dictionaries, converting data types, and more.
  - **Timing Utilities**: Tools to measure and log execution time of functions and processes.
  - **Pretty Printing**: Enhanced printing functions for structured data, including HTML tables for Jupyter notebooks.
+ - **Enhanced Error Handling**: Rich error tracebacks with code context, configurable error handling modes ('raise', 'ignore', 'log'), and detailed progress reporting.

  ## Installation

@@ -162,20 +199,133 @@ result = compute_sum(5, 7) # Retrieved from in-memory cache

  ### Parallel Processing

- #### Multi-threading
+ #### Multi-threading with Enhanced Error Handling

- Execute functions concurrently using multiple threads. This approach is straightforward and automatically handles both notebook and Python script executions. In a notebook environment, it delegates the running thread to a separate process. If interrupted, it immediately stops this process, avoiding thread dependency issues where threads continue running until all tasks are completed.
+ Execute functions concurrently using multiple threads with comprehensive error handling. The enhanced error handling provides three modes: 'raise' (default), 'ignore', and 'log'. When errors occur, you'll see rich-formatted tracebacks with code context and caller information.

  ```python
  from speedy_utils import multi_thread

  def process_item(item):
- # Your processing logic
+ # Simulate processing that might fail
+ if item == 3:
+ raise ValueError(f"Invalid item: {item}")
  return item * 2

  items = [1, 2, 3, 4, 5]
- results = multi_thread(process_item, items, workers=3)
- print(results) # [2, 4, 6, 8, 10]
+
+ # Default behavior: raise on first error with rich traceback
+ try:
+ results = multi_thread(process_item, items, workers=3)
+ except SystemExit:
+ print("Error occurred and was displayed with rich formatting")
+
+ # Continue processing on errors, return None for failed items
+ results = multi_thread(process_item, items, workers=3, error_handler='ignore')
+ print(results) # [2, 4, None, 8, 10]
+
+ # Log errors to files and continue processing
+ results = multi_thread(process_item, items, workers=3, error_handler='log', max_error_files=10)
+ print(results) # [2, 4, None, 8, 10] - errors logged to .cache/speedy_utils/error_logs/
+ ```
+
+ #### Multi-processing with Error Handling
+
+ Process items across multiple processes with the same enhanced error handling capabilities.
+
+ ```python
+ from speedy_utils import multi_process
+
+ def risky_computation(x):
+ """Computation that might fail for certain inputs."""
+ if x % 5 == 0:
+ raise RuntimeError(f"Cannot process multiples of 5: {x}")
+ return x ** 2
+
+ data = list(range(12))
+
+ # Process with error logging (continues on errors)
+ results = multi_process(
+ risky_computation,
+ data,
+ backend='mp',
+ error_handler='log',
+ max_error_files=5
+ )
+ print(results) # [0, 1, 4, 9, 16, None, 36, 49, 64, 81, None, 121]
+ ```
+
+ ### Enhanced Error Handling
+
+ **Speedy Utils** now provides comprehensive error handling for parallel processing with rich formatting and detailed diagnostics.
+
+ #### Rich Error Tracebacks
+
+ When errors occur, you'll see beautifully formatted tracebacks with:
+
+ - **Code context**: Lines of code around the error location
+ - **Caller information**: Shows where the parallel function was invoked
+ - **Filtered frames**: Focuses on user code, hiding infrastructure details
+ - **Color coding**: Easy-to-read formatting with syntax highlighting
+
+ #### Error Handling Modes
+
+ Choose how to handle errors in parallel processing:
+
+ - **`'raise'` (default)**: Stop on first error with detailed traceback
+ - **`'ignore'`**: Continue processing, return `None` for failed items
+ - **`'log'`**: Log errors to files and continue processing
+
+ #### Error Logging
+
+ When using `error_handler='log'`, errors are automatically saved to timestamped files in `.cache/speedy_utils/error_logs/` with full context and stack traces.
+
+ #### Progress Reporting with Error Statistics
+
+ Progress bars now show real-time error and success counts:
+
+ ```bash
+ Multi-thread [8/10] [00:02<00:00, 3.45it/s, success=8, errors=2, pending=0]
+ ```
+
+ This makes it easy to monitor processing health at a glance.
+
+ #### Example: Robust Data Processing
+
+ ```python
+ from speedy_utils import multi_thread
+
+ def process_data_record(record):
+ """Process a data record that might have issues."""
+ try:
+ # Your processing logic here
+ value = record['value'] / record['divisor']
+ return {'result': value, 'status': 'success'}
+ except KeyError as e:
+ raise ValueError(f"Missing required field in record: {e}")
+ except ZeroDivisionError:
+ raise ValueError("Division by zero in record")
+
+ # Sample data with some problematic records
+ data = [
+ {'value': 10, 'divisor': 2}, # OK
+ {'value': 15, 'divisor': 0}, # Will error
+ {'value': 20, 'divisor': 4}, # OK
+ {'value': 25}, # Missing divisor - will error
+ ]
+
+ # Process with error logging - continues despite errors
+ results = multi_thread(
+ process_data_record,
+ data,
+ workers=4,
+ error_handler='log',
+ max_error_files=10
+ )
+
+ print("Results:", results)
+ # Output: Results: [{'result': 5.0, 'status': 'success'}, None, {'result': 5.0, 'status': 'success'}, None]
+ # Errors are logged to files for later analysis
  ```

  ### File I/O
@@ -7,7 +7,7 @@ llm_utils/chat_format/transform.py,sha256=PJ2g9KT1GSbWuAs7giEbTpTAffpU9QsIXyRlbf
  llm_utils/chat_format/utils.py,sha256=M2EctZ6NeHXqFYufh26Y3CpSphN0bdZm5xoNaEJj5vg,1251
  llm_utils/lm/__init__.py,sha256=4jYMy3wPH3tg-tHFyWEWOqrnmX4Tu32VZCdzRGMGQsI,778
  llm_utils/lm/base_prompt_builder.py,sha256=_TzYMsWr-SsbA_JNXptUVN56lV5RfgWWTrFi-E8LMy4,12337
- llm_utils/lm/llm.py,sha256=4nTBNvIZfsKUZo5SdllLQOp3w-HohR3B1hcfUnYDl7A,20014
+ llm_utils/lm/llm.py,sha256=i6L5aKF6NhzmaFPBA2pCm8TkQmS1nCgORMqP5QyfJ28,20097
  llm_utils/lm/llm_signature.py,sha256=vV8uZgLLd6ZKqWbq0OPywWvXAfl7hrJQnbtBF-VnZRU,1244
  llm_utils/lm/lm_base.py,sha256=Bk3q34KrcCK_bC4Ryxbc3KqkiPL39zuVZaBQ1i6wJqs,9437
  llm_utils/lm/mixins.py,sha256=Nz7CwJFBOvbZNbODUlJC04Pcbac3zWnT8vy7sZG_MVI,24906
@@ -30,7 +30,7 @@ llm_utils/vector_cache/core.py,sha256=VXuYJy1AX22NHKvIXRriETip5RrmQcNp73-g-ZT774
  llm_utils/vector_cache/types.py,sha256=CpMZanJSTeBVxQSqjBq6pBVWp7u2-JRcgY9t5jhykdQ,438
  llm_utils/vector_cache/utils.py,sha256=OsiRFydv8i8HiJtPL9hh40aUv8I5pYfg2zvmtDi4DME,1446
  speedy_utils/__imports.py,sha256=V0YzkDK4-QkK_IDXY1be6C6_STuNhXAKIp4_dM0coQs,7800
- speedy_utils/__init__.py,sha256=1ubAYR6P0cEZJLDt7KQLLxl6ylh-T7WE7HPP94-rVLI,3045
+ speedy_utils/__init__.py,sha256=_kSjS816Kv5UZPd4EM_juB68tXM_sHUYt6OFB-RhE6U,3261
  speedy_utils/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  speedy_utils/common/clock.py,sha256=raLtMGIgzrRej5kUt7hOUm2ZZw2THVPo-q8dMvdZOxw,7354
  speedy_utils/common/function_decorator.py,sha256=GKXqRs_hHFFmhyhql0Br0o52WzekUnpNlm99NfaVwgY,2025
@@ -39,13 +39,14 @@ speedy_utils/common/notebook_utils.py,sha256=6mxXZcTHwYob3nobAzbSZnDyXRahFaaSko1
  speedy_utils/common/patcher.py,sha256=Rku-N4DJNue8BCLUx7y3ad_3t_WU2HleHKlbR0vhaRc,2319
  speedy_utils/common/report_manager.py,sha256=78KQ0gUzvbzT6EjHYZ5zgtV41cPRvdX8hnF2oSWA4qA,3849
  speedy_utils/common/utils_cache.py,sha256=1UAqOSb4nFVlhuQRfTEXCN-8Wf6yntXyMA6yp61-98I,26277
+ speedy_utils/common/utils_error.py,sha256=KQx2JTZsvsX2DsKRIoVR-4rc-6-l3OzEz9UtnHt8HJg,9108
  speedy_utils/common/utils_io.py,sha256=94m_EZ2eIs3w2m0rx-QQWsREPpVJctpweYHco3byczQ,15876
  speedy_utils/common/utils_misc.py,sha256=ZRJCS7OJxybpVm1sasoeCYRW2TaaGCXj4DySYlQeVR8,2227
  speedy_utils/common/utils_print.py,sha256=AGDB7mgJnO00QkJBH6kJb46738q3GzMUZPwtQ248vQw,4763
  speedy_utils/multi_worker/__init__.py,sha256=urcuxzaAJp-Rl3SIwHNre3x2vyHxLR7YGiDdm-Q8GQs,361
  speedy_utils/multi_worker/dataset_ray.py,sha256=U_l_4Y7CVpaHiApsXQSdNvals8NK87LHPS_XHiJF3qs,10044
  speedy_utils/multi_worker/parallel_gpu_pool.py,sha256=A7llZcQbRVZqwCqNRku7TpqGCdSoIzpdcTaupgqT5nI,6108
- speedy_utils/multi_worker/process.py,sha256=EujMtc9NqDVPqGOrLjlM6k60vEpRmOvlIJAYPKRQO6I,45237
+ speedy_utils/multi_worker/process.py,sha256=U-pjHoWZ3xOeplMl2nSxVeiJE0F9V-eswpSdK-8c3dU,45446
  speedy_utils/multi_worker/progress.py,sha256=Ozeca-t-j1224n_dWwZkWzva9DC16SCLgScKeGtXLaQ,4717
  speedy_utils/multi_worker/thread.py,sha256=E7o_iUCIKmgk1tFt7mZAFT7c5q229wVzWj-trmVsxVA,27254
  speedy_utils/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -55,7 +56,7 @@ vision_utils/README.md,sha256=AIDZZj8jo_QNrEjFyHwd00iOO431s-js-M2dLtVTn3I,5740
  vision_utils/__init__.py,sha256=hF54sT6FAxby8kDVhOvruy4yot8O-Ateey5n96O1pQM,284
  vision_utils/io_utils.py,sha256=pI0Va6miesBysJcllK6NXCay8HpGZsaMWwlsKB2DMgA,26510
  vision_utils/plot.py,sha256=HkNj3osA3moPuupP1VguXfPPOW614dZO5tvC-EFKpKM,12028
- speedy_utils-1.1.43.dist-info/METADATA,sha256=IiZqjlKf2KUOwRDLZd0TusT3T5PECy5YRJssiw8wHAo,8099
- speedy_utils-1.1.43.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- speedy_utils-1.1.43.dist-info/entry_points.txt,sha256=rwn89AYfBUh9SRJtFbpp-u2JIKiqmZ2sczvqyO6s9cI,289
- speedy_utils-1.1.43.dist-info/RECORD,,
+ speedy_utils-1.1.45.dist-info/METADATA,sha256=yXr9vEuXiRpvZ3VibGULqr3X1a832aQZJAb-lJLM6mM,13073
+ speedy_utils-1.1.45.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ speedy_utils-1.1.45.dist-info/entry_points.txt,sha256=rwn89AYfBUh9SRJtFbpp-u2JIKiqmZ2sczvqyO6s9cI,289
+ speedy_utils-1.1.45.dist-info/RECORD,,