aiqtoolkit 1.2.0a20250731__py3-none-any.whl → 1.2.0a20250801__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/builder/function.py +21 -6
- aiq/builder/function_base.py +6 -2
- aiq/cli/commands/sizing/calc.py +6 -3
- aiq/cli/commands/uninstall.py +2 -4
- aiq/observability/exporter/processing_exporter.py +99 -46
- aiq/observability/exporter/span_exporter.py +1 -0
- aiq/observability/processor/batching_processor.py +52 -59
- aiq/observability/processor/callback_processor.py +43 -0
- aiq/observability/processor/processor.py +4 -1
- aiq/profiler/calc/calc_runner.py +5 -1
- aiq/profiler/calc/data_models.py +18 -6
- aiq/tool/server_tools.py +1 -1
- aiq/utils/type_converter.py +52 -10
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/METADATA +1 -1
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/RECORD +20 -19
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/WHEEL +0 -0
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/entry_points.txt +0 -0
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/licenses/LICENSE-3rd-party.txt +0 -0
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/licenses/LICENSE.md +0 -0
- {aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/top_level.txt +0 -0
aiq/builder/function.py
CHANGED
@@ -76,11 +76,16 @@ class Function(FunctionBase[InputT, StreamingOutputT, SingleOutputT], ABC):
         -------
         _T
             The converted value.
+
+        Raises
+        ------
+        ValueError
+            If the value cannot be converted to the specified type (when `to_type` is specified).
         """

         return self._converter.convert(value, to_type=to_type)

-    def try_convert(self, value: typing.Any, to_type: type[_T]) -> _T:
+    def try_convert(self, value: typing.Any, to_type: type[_T]) -> _T | typing.Any:
         """
         Converts the given value to the specified type using graceful error handling.
         If conversion fails, returns the original value and continues processing.
@@ -94,7 +99,7 @@ class Function(FunctionBase[InputT, StreamingOutputT, SingleOutputT], ABC):

         Returns
         -------
-        _T
+        _T | typing.Any
             The converted value, or original value if conversion fails.
         """
         return self._converter.try_convert(value, to_type=to_type)
@@ -129,17 +134,22 @@ class Function(FunctionBase[InputT, StreamingOutputT, SingleOutputT], ABC):
         -------
         typing.Any
             The output of the function optionally converted to the specified type.
+
+        Raises
+        ------
+        ValueError
+            If the output of the function cannot be converted to the specified type.
         """

         with self._context.push_active_function(self.instance_name,
                                                 input_data=value) as manager:  # Set the current invocation context
             try:
-                converted_input: InputT = self._convert_input(value)
+                converted_input: InputT = self._convert_input(value)

                 result = await self._ainvoke(converted_input)

                 if to_type is not None and not isinstance(result, to_type):
-                    result = self.
+                    result = self.convert(result, to_type)

                 manager.set_output(result)

@@ -215,18 +225,23 @@ class Function(FunctionBase[InputT, StreamingOutputT, SingleOutputT], ABC):
         ------
         typing.Any
             The output of the function optionally converted to the specified type.
+
+        Raises
+        ------
+        ValueError
+            If the output of the function cannot be converted to the specified type (when `to_type` is specified).
         """

         with self._context.push_active_function(self.instance_name, input_data=value) as manager:
             try:
-                converted_input: InputT = self._convert_input(value)
+                converted_input: InputT = self._convert_input(value)

                 # Collect streaming outputs to capture the final result
                 final_output: list[typing.Any] = []

                 async for data in self._astream(converted_input):
                     if to_type is not None and not isinstance(data, to_type):
-                        converted_data = self.
+                        converted_data = self.convert(data, to_type=to_type)
                         final_output.append(converted_data)
                         yield converted_data
                     else:
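The convert/try_convert split above is the behavioral contract these docstrings now spell out: `convert` raises `ValueError` when no conversion path exists, while `try_convert` falls back to returning the input unchanged, which is why its return annotation widens to `_T | typing.Any`. A minimal stand-in sketch (not the aiq implementation; all names here are illustrative):

```python
import typing

_T = typing.TypeVar("_T")


class MiniConverter:
    """Illustrative stand-in for the converter used by Function."""

    def convert(self, value: typing.Any, to_type: type[_T]) -> _T:
        if isinstance(value, to_type):
            return value
        try:
            return to_type(value)  # e.g. int("42") -> 42
        except Exception as e:
            # convert() signals failure loudly, as the new Raises sections document
            raise ValueError(f"Cannot convert {type(value)} to {to_type}") from e

    def try_convert(self, value: typing.Any, to_type: type[_T]) -> "_T | typing.Any":
        try:
            return self.convert(value, to_type)
        except ValueError:
            return value  # graceful fallback, hence the widened annotation


conv = MiniConverter()
assert conv.convert("42", int) == 42
sentinel = object()
assert conv.try_convert(sentinel, int) is sentinel  # original value returned on failure
```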
aiq/builder/function_base.py
CHANGED
@@ -350,7 +350,7 @@ class FunctionBase(typing.Generic[InputT, StreamingOutputT, SingleOutputT], ABC)
         # output because the ABC has it.
         return True

-    def _convert_input(self, value: typing.Any):
+    def _convert_input(self, value: typing.Any) -> InputT:
         if (isinstance(value, self.input_class)):
             return value

@@ -373,4 +373,8 @@ class FunctionBase(typing.Generic[InputT, StreamingOutputT, SingleOutputT], ABC)
             return value

         # Fallback to the converter
-        return self._converter.convert(value, to_type=self.input_class)
+        try:
+            return self._converter.convert(value, to_type=self.input_class)
+        except ValueError as e:
+            # Input parsing should yield a TypeError instead of a ValueError
+            raise TypeError from e
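The new fallback in `_convert_input` translates converter failures into `TypeError` while keeping the original exception chained. A short self-contained sketch of that `raise ... from e` pattern (hypothetical function, not from aiq):

```python
def parse_input(value: object, input_class: type = dict) -> object:
    """Illustrative only: mimics the ValueError -> TypeError translation."""
    try:
        if not isinstance(value, input_class):
            raise ValueError(f"No conversion path from {type(value)}")  # converter failure
        return value
    except ValueError as e:
        # Callers see a TypeError for bad input, with the cause preserved
        raise TypeError(f"Invalid input for {input_class}") from e


try:
    parse_input("not-a-dict")
except TypeError as err:
    assert isinstance(err.__cause__, ValueError)  # chain kept by `raise ... from e`
```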
aiq/cli/commands/sizing/calc.py
CHANGED
@@ -274,9 +274,12 @@ def calc_command(ctx,

     click.echo(tabulate(table, headers=headers, tablefmt="github"))

-    # Display slope-based GPU estimates
-    click.echo("")
-    click.echo(click.style("=== GPU ESTIMATES ===", fg="bright_blue", bold=True))
+    # Display slope-based GPU estimates if they are available
+    if results.gpu_estimates.gpu_estimate_by_llm_latency is not None or \
+            results.gpu_estimates.gpu_estimate_by_wf_runtime is not None:
+        click.echo("")
+        click.echo(click.style("=== GPU ESTIMATES ===", fg="bright_blue", bold=True))
+
     if results.gpu_estimates.gpu_estimate_by_wf_runtime is not None:
         click.echo(
             click.style(
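The fix guards the section header so it only prints when at least one estimate exists. A hedged sketch of the same pattern; the `results` model is simplified here to two optional floats:

```python
import click


def show_gpu_estimates(by_llm_latency: float | None, by_wf_runtime: float | None) -> None:
    if by_llm_latency is None and by_wf_runtime is None:
        return  # no estimates -> no header, matching the new behavior
    click.echo("")
    click.echo(click.style("=== GPU ESTIMATES ===", fg="bright_blue", bold=True))
    if by_wf_runtime is not None:
        click.echo(f"GPU estimate by workflow runtime: {by_wf_runtime:.2f}")
    if by_llm_latency is not None:
        click.echo(f"GPU estimate by LLM latency: {by_llm_latency:.2f}")


show_gpu_estimates(None, 4.0)  # header plus one line; all-None prints nothing
```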
aiq/cli/commands/uninstall.py
CHANGED
@@ -53,13 +53,11 @@ async def uninstall_packages(packages: list[dict[str, str]]) -> None:
         await stack.enter_async_context(registry_handler.remove(packages=package_name_list))


-@click.group(name=__name__,
-             invoke_without_command=True,
-             help=("Uninstall an AIQ Toolkit plugin packages from the local environment."))
+@click.group(name=__name__, invoke_without_command=True, help=("Uninstall plugin packages from the local environment."))
 @click.argument("packages", type=str)
 def uninstall_command(packages: str) -> None:
     """
-    Uninstall
+    Uninstall plugin packages from the local environment.
     """

     packages = packages.split()
aiq/observability/exporter/processing_exporter.py
CHANGED
@@ -17,6 +17,7 @@ import asyncio
 import logging
 from abc import abstractmethod
 from collections.abc import Coroutine
+from typing import Any
 from typing import Generic
 from typing import TypeVar

@@ -24,6 +25,7 @@ from aiq.builder.context import AIQContextState
 from aiq.data_models.intermediate_step import IntermediateStep
 from aiq.observability.exporter.base_exporter import BaseExporter
 from aiq.observability.mixin.type_introspection_mixin import TypeIntrospectionMixin
+from aiq.observability.processor.callback_processor import CallbackProcessor
 from aiq.observability.processor.processor import Processor
 from aiq.utils.type_utils import DecomposedType
 from aiq.utils.type_utils import override
@@ -89,6 +91,14 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
                              self._processors[-1].output_type)
         self._processors.append(processor)

+        # Set up pipeline continuation callback for processors that support it
+        if isinstance(processor, CallbackProcessor):
+            # Create a callback that continues processing through the rest of the pipeline
+            async def pipeline_callback(item):
+                await self._continue_pipeline_after(processor, item)
+
+            processor.set_done_callback(pipeline_callback)
+
     def remove_processor(self, processor: Processor) -> None:
         """Remove a processor from the processing pipeline.

@@ -143,20 +153,82 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
         """Process item through all registered processors.

         Args:
-            item: The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
+            item (PipelineInputT): The item to process (starts as PipelineInputT, can transform to PipelineOutputT)
+
+        Returns:
+            PipelineOutputT: The processed item after running through all processors
+        """
+        return await self._process_through_processors(self._processors, item)  # type: ignore
+
+    async def _process_through_processors(self, processors: list[Processor], item: Any) -> Any:
+        """Process an item through a list of processors.
+
+        Args:
+            processors (list[Processor]): List of processors to run the item through
+            item (Any): The item to process

         Returns:
             The processed item after running through all processors
         """
         processed_item = item
-        for processor in
+        for processor in processors:
             try:
                 processed_item = await processor.process(processed_item)
             except Exception as e:
                 logger.error("Error in processor %s: %s", processor.__class__.__name__, e, exc_info=True)
-                # Continue with unprocessed item rather than failing
+                # Continue with unprocessed item rather than failing
+        return processed_item
+
+    async def _export_final_item(self, processed_item: Any, raise_on_invalid: bool = False) -> None:
+        """Export a processed item with proper type handling.
+
+        Args:
+            processed_item (Any): The item to export
+            raise_on_invalid (bool): If True, raise ValueError for invalid types instead of logging warning
+        """
+        if isinstance(processed_item, list):
+            if len(processed_item) > 0:
+                await self.export_processed(processed_item)
+            else:
+                logger.debug("Skipping export of empty batch")
+        elif isinstance(processed_item, self.output_class):
+            await self.export_processed(processed_item)
+        else:
+            if raise_on_invalid:
+                raise ValueError(f"Processed item {processed_item} is not a valid output type. "
+                                 f"Expected {self.output_class} or list[{self.output_class}]")
+            logger.warning("Processed item %s is not a valid output type for export", processed_item)
+
+    async def _continue_pipeline_after(self, source_processor: Processor, item: Any) -> None:
+        """Continue processing an item through the pipeline after a specific processor.

-        return processed_item
+        This is used when processors (like BatchingProcessor) need to inject items
+        back into the pipeline flow to continue through downstream processors.
+
+        Args:
+            source_processor (Processor): The processor that generated the item
+            item (Any): The item to continue processing through the remaining pipeline
+        """
+        try:
+            # Find the source processor's position
+            try:
+                source_index = self._processors.index(source_processor)
+            except ValueError:
+                logger.error("Source processor %s not found in pipeline", source_processor.__class__.__name__)
+                return
+
+            # Process through remaining processors (skip the source processor)
+            remaining_processors = self._processors[source_index + 1:]
+            processed_item = await self._process_through_processors(remaining_processors, item)
+
+            # Export the final result
+            await self._export_final_item(processed_item)
+
+        except Exception as e:
+            logger.error("Failed to continue pipeline processing after %s: %s",
+                         source_processor.__class__.__name__,
+                         e,
+                         exc_info=True)

     async def _export_with_processing(self, item: PipelineInputT) -> None:
         """Export an item after processing it through the pipeline.
@@ -169,20 +241,11 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,
             final_item: PipelineOutputT = await self._process_pipeline(item)

             # Handle different output types from batch processors
-            if isinstance(final_item, list):
-
-
-
-
-
-                # Non-empty lists should be exported (batch processors)
-                await self.export_processed(final_item)
-            elif isinstance(final_item, self.output_class):
-                # Single items should be exported normally
-                await self.export_processed(final_item)
-            else:
-                raise ValueError(f"Processed item {final_item} is not a valid output type. "
-                                 f"Expected {self.output_class} or list[{self.output_class}]")
+            if isinstance(final_item, list) and len(final_item) == 0:
+                logger.debug("Skipping export of empty batch from processor pipeline")
+                return
+
+            await self._export_final_item(final_item, raise_on_invalid=True)

         except Exception as e:
             logger.error("Failed to export item '%s': %s", item, e, exc_info=True)
@@ -235,35 +298,25 @@ class ProcessingExporter(Generic[PipelineInputT, PipelineOutputT], BaseExporter,

     @override
     async def _cleanup(self):
-        """Enhanced cleanup that shuts down all shutdown-aware processors.
+        """Enhanced cleanup that shuts down all shutdown-aware processors.
+
+        Each processor is responsible for its own cleanup, including routing
+        any final batches through the remaining pipeline via their done callbacks.
+        """
         # Shutdown all processors that support it
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        # Process final batches from batch processors
-        for processor in getattr(self, '_processors', []):
-            if hasattr(processor, 'has_final_batch') and hasattr(processor, 'get_final_batch'):
-                if processor.has_final_batch():
-                    final_batch = processor.get_final_batch()
-                    if final_batch:
-                        logger.info("Processing final batch of %d items from %s during cleanup",
-                                    len(final_batch),
-                                    processor.__class__.__name__)
-                        try:
-                            await self.export_processed(final_batch)
-                        except Exception as e:
-                            logger.error("Error processing final batch during cleanup: %s", e, exc_info=True)
+        shutdown_tasks = []
+        for processor in getattr(self, '_processors', []):
+            shutdown_method = getattr(processor, 'shutdown', None)
+            if shutdown_method:
+                logger.debug("Shutting down processor: %s", processor.__class__.__name__)
+                shutdown_tasks.append(shutdown_method())
+
+        if shutdown_tasks:
+            try:
+                await asyncio.gather(*shutdown_tasks, return_exceptions=True)
+                logger.info("Successfully shut down %d processors", len(shutdown_tasks))
+            except Exception as e:
+                logger.error("Error shutting down processors: %s", e, exc_info=True)

         # Call parent cleanup
         await super()._cleanup()
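The heart of this change is `_continue_pipeline_after()`: a `CallbackProcessor` that holds items back (such as `BatchingProcessor`) is handed a callback that re-enters the pipeline just downstream of its own position. A runnable, simplified sketch of that mechanism, with plain classes standing in for the aiq Processor types and no error handling:

```python
import asyncio


class UpperBatch:
    """Downstream processor that consumes the list[str] emitted by the batcher."""

    async def process(self, batch):
        return [s.upper() for s in batch]


class MiniBatcher:
    """Stand-in for BatchingProcessor: holds items, flushes them as one batch."""

    def __init__(self):
        self.held = []
        self.done = None               # set by the exporter via set_done_callback()

    def set_done_callback(self, cb):
        self.done = cb

    async def process(self, item):
        self.held.append(item)
        return []                      # nothing flows downstream until a flush

    async def flush(self):
        batch, self.held = self.held, []
        await self.done(batch)         # re-enter the pipeline after this processor


async def main():
    exported = []
    batcher = MiniBatcher()
    pipeline = [batcher, UpperBatch()]

    async def continue_after(source, item):
        # Mirrors _continue_pipeline_after(): run only the downstream processors
        for p in pipeline[pipeline.index(source) + 1:]:
            item = await p.process(item)
        exported.append(item)

    batcher.set_done_callback(lambda batch: continue_after(batcher, batch))

    for word in ("a", "b"):
        await pipeline[0].process(word)
    await batcher.flush()
    assert exported == [["A", "B"]]    # batch continued through UpperBatch, then exported


asyncio.run(main())
```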
aiq/observability/processor/batching_processor.py
CHANGED
@@ -23,17 +23,17 @@ from typing import Any
 from typing import Generic
 from typing import TypeVar

-from aiq.observability.processor.processor import Processor
+from aiq.observability.processor.callback_processor import CallbackProcessor

 logger = logging.getLogger(__name__)

 T = TypeVar('T')


-class BatchingProcessor(Processor[T, list[T]], Generic[T]):
+class BatchingProcessor(CallbackProcessor[T, list[T]], Generic[T]):
     """Pass-through batching processor that accumulates items and outputs batched lists.

-    This processor
+    This processor extends CallbackProcessor[T, List[T]] to provide batching functionality.
     It accumulates individual items and outputs them as batches when size or time thresholds
     are met. The batched output continues through the processing pipeline.

@@ -43,25 +43,31 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
     Key Features:
     - Pass-through design: Processor[T, List[T]]
     - Size-based and time-based batching
-    -
+    - Pipeline flow: batches continue through downstream processors
     - GUARANTEED: No items lost during cleanup
     - Comprehensive statistics and monitoring
     - Proper cleanup and shutdown handling
     - High-performance async implementation
     - Back-pressure handling with queue limits

+    Pipeline Flow:
+    Normal processing: Individual items → BatchingProcessor → List[items] → downstream processors → export
+    Time-based flush: Scheduled batches automatically continue through remaining pipeline
+    Shutdown: Final batch immediately routed through remaining pipeline
+
     Cleanup Guarantee:
-    When
+    When shutdown() is called, this processor:
     1. Stops accepting new items
-    2.
-    3.
-    4. Ensures zero data loss
+    2. Creates final batch from all queued items
+    3. Immediately routes final batch through remaining pipeline via callback
+    4. Ensures zero data loss with no external coordination needed

     Usage in Pipeline:
     ```python
-    # Individual spans → Batched spans → Continue
-    exporter.add_processor(BatchingProcessor[Span](batch_size=100))
-    exporter.add_processor(
+    # Individual spans → Batched spans → Continue through downstream processors
+    exporter.add_processor(BatchingProcessor[Span](batch_size=100))  # Auto-wired with pipeline callback
+    exporter.add_processor(FilterProcessor())  # Processes List[Span] from batching
+    exporter.add_processor(TransformProcessor())  # Further processing
     ```

     Args:
@@ -70,6 +76,10 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         max_queue_size: Maximum items to queue before blocking (default: 1000)
         drop_on_overflow: If True, drop items when queue is full (default: False)
         shutdown_timeout: Max seconds to wait for final batch processing (default: 10.0)
+
+    Note:
+        The done_callback for pipeline integration is automatically set by ProcessingExporter
+        when the processor is added to a pipeline. For standalone usage, call set_done_callback().
     """

     def __init__(self,
@@ -77,14 +87,13 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
                  flush_interval: float = 5.0,
                  max_queue_size: int = 1000,
                  drop_on_overflow: bool = False,
-                 shutdown_timeout: float = 10.0,
-                 done_callback: Callable[[list[T]], Awaitable[None]] | None = None):
+                 shutdown_timeout: float = 10.0):
         self._batch_size = batch_size
         self._flush_interval = flush_interval
         self._max_queue_size = max_queue_size
         self._drop_on_overflow = drop_on_overflow
         self._shutdown_timeout = shutdown_timeout
-        self._done_callback = done_callback
+        self._done_callback: Callable[[list[T]], Awaitable[None]] | None = None

         # Batching state
         self._batch_queue: deque[T] = deque()
@@ -93,11 +102,7 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         self._batch_lock = asyncio.Lock()
         self._shutdown_requested = False
         self._shutdown_complete = False
-        self._shutdown_complete_event
-
-        # Final batch handling for cleanup
-        self._final_batch: list[T] | None = None
-        self._final_batch_processed = False
+        self._shutdown_complete_event = asyncio.Event()

         # Callback for immediate export of scheduled batches
         self._done = None
@@ -167,7 +172,11 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         return []

     def set_done_callback(self, callback: Callable[[list[T]], Awaitable[None]]):
-        """Set callback function for
+        """Set callback function for routing batches through the remaining pipeline.
+
+        This is automatically set by ProcessingExporter.add_processor() to continue
+        batches through downstream processors before final export.
+        """
         self._done_callback = callback

     async def _schedule_flush(self):
@@ -178,15 +187,15 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
             if not self._shutdown_requested and self._batch_queue:
                 batch = await self._create_batch()
                 if batch:
-                    #
+                    # Route scheduled batches through pipeline via callback
                     if self._done_callback is not None:
                         try:
                             await self._done_callback(batch)
-                            logger.debug("Scheduled flush
+                            logger.debug("Scheduled flush routed batch of %d items through pipeline", len(batch))
                         except Exception as e:
-                            logger.error("Error
+                            logger.error("Error routing scheduled batch through pipeline: %s", e, exc_info=True)
                     else:
-                        logger.warning("Scheduled flush created batch of %d items but no
+                        logger.warning("Scheduled flush created batch of %d items but no pipeline callback set",
                                        len(batch))
         except asyncio.CancelledError:
             pass
@@ -223,11 +232,8 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         """Shutdown the processor and ensure all items are processed.

         CRITICAL: This method is called by ProcessingExporter._cleanup() to ensure
-        no items are lost during shutdown. It
-
-
-        The final batch will be processed by the next process() call or can be
-        retrieved via get_final_batch().
+        no items are lost during shutdown. It immediately routes any remaining
+        items as a final batch through the rest of the processing pipeline.
         """
         if self._shutdown_requested:
             logger.debug("Shutdown already requested, waiting for completion")
@@ -251,13 +257,26 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         except asyncio.CancelledError:
             pass

-        # Create final batch
+        # Create and route final batch through pipeline
         async with self._batch_lock:
             if self._batch_queue:
-                self._final_batch = await self._create_batch()
-                logger.info("Created final batch of %d items during shutdown", len(self._final_batch))
+                final_batch = await self._create_batch()
+                logger.info("Created final batch of %d items during shutdown", len(final_batch))
+
+                # Route final batch through pipeline via callback
+                if self._done_callback is not None:
+                    try:
+                        await self._done_callback(final_batch)
+                        logger.info("Successfully routed final batch of %d items through pipeline during shutdown",
+                                    len(final_batch))
+                    except Exception as e:
+                        logger.error("Error routing final batch through pipeline during shutdown: %s",
+                                     e,
+                                     exc_info=True)
+                else:
+                    logger.warning("Final batch of %d items created during shutdown but no pipeline callback set",
+                                   len(final_batch))
             else:
-                self._final_batch = []
                 logger.info("No items remaining during shutdown")

         self._shutdown_complete = True
@@ -269,30 +288,6 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
         self._shutdown_complete = True
         self._shutdown_complete_event.set()

-    def get_final_batch(self) -> list[T]:
-        """Get the final batch created during shutdown.
-
-        This method allows the exporter to retrieve and process any items
-        that were queued when shutdown was called.
-
-        Returns:
-            List[T]: Final batch of items, empty list if none
-        """
-        if self._final_batch is not None:
-            final_batch = self._final_batch
-            self._final_batch = None  # Clear to avoid double processing
-            self._final_batch_processed = True
-            return final_batch
-        return []
-
-    def has_final_batch(self) -> bool:
-        """Check if there's a final batch waiting to be processed.
-
-        Returns:
-            bool: True if final batch exists and hasn't been processed
-        """
-        return self._final_batch is not None and not self._final_batch_processed
-
     def get_stats(self) -> dict[str, Any]:
         """Get comprehensive batching statistics."""
         return {
@@ -309,8 +304,6 @@ class BatchingProcessor(Processor[T, list[T]], Generic[T]):
             "shutdown_batches": self._shutdown_batches,
             "shutdown_requested": self._shutdown_requested,
             "shutdown_complete": self._shutdown_complete,
-            "final_batch_size": len(self._final_batch) if self._final_batch else 0,
-            "final_batch_processed": self._final_batch_processed,
             "avg_items_per_batch": self._items_processed / max(1, self._batches_created),
             "drop_rate": self._items_dropped / max(1, self._items_processed) * 100 if self._items_processed > 0 else 0
         }
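For intuition, here is a self-contained sketch (not the aiq class) of the two flush triggers the docstring describes, plus the shutdown guarantee that drains whatever is still queued:

```python
import asyncio
from collections import deque


class TinyBatcher:
    def __init__(self, batch_size=3, flush_interval=0.05, on_batch=None):
        self.batch_size = batch_size
        self.flush_interval = flush_interval
        self.on_batch = on_batch       # plays the role of the done callback
        self.queue = deque()
        self._timer = None

    async def add(self, item):
        self.queue.append(item)
        if len(self.queue) >= self.batch_size:
            await self._flush()                                    # size-based flush
        elif self._timer is None:
            self._timer = asyncio.create_task(self._flush_later())

    async def _flush_later(self):
        await asyncio.sleep(self.flush_interval)                   # time-based flush
        self._timer = None             # this task is finishing; don't cancel it
        await self._flush()

    async def _flush(self):
        if self._timer is not None:
            self._timer.cancel()       # a flush supersedes any pending timer
            self._timer = None
        if self.queue:
            batch = list(self.queue)
            self.queue.clear()
            await self.on_batch(batch)

    async def shutdown(self):
        # Cleanup guarantee: route whatever is still queued as a final batch
        await self._flush()


async def demo():
    batches = []

    async def collect(batch):
        batches.append(batch)

    b = TinyBatcher(on_batch=collect)
    for i in range(4):
        await b.add(i)
    await b.shutdown()
    assert batches == [[0, 1, 2], [3]]  # size-triggered batch, then the final batch


asyncio.run(demo())
```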
aiq/observability/processor/callback_processor.py
ADDED
@@ -0,0 +1,43 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import abstractmethod
+from collections.abc import Awaitable
+from collections.abc import Callable
+from typing import Any
+from typing import Generic
+from typing import TypeVar
+
+from aiq.observability.processor.processor import Processor
+
+InputT = TypeVar('InputT')
+OutputT = TypeVar('OutputT')
+
+
+class CallbackProcessor(Processor[InputT, OutputT], Generic[InputT, OutputT]):
+    """Abstract base class for processors that support done callbacks.
+
+    Processors inheriting from this class can register callbacks that are
+    invoked when items are ready for further processing or export.
+    """
+
+    @abstractmethod
+    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None:
+        """Set a callback function to be invoked when items are processed.
+
+        Args:
+            callback (Callable[[Any], Awaitable[None]]): Function to call with processed items
+        """
+        pass
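A hedged sketch of what a concrete subclass looks like. To keep it runnable standalone, `Processor` is reduced here to its abstract async `process`; the real base classes live in aiq.observability.processor and may carry additional behavior:

```python
import asyncio
from abc import ABC, abstractmethod
from collections.abc import Awaitable, Callable
from typing import Any, Generic, TypeVar

InputT = TypeVar('InputT')
OutputT = TypeVar('OutputT')


class Processor(Generic[InputT, OutputT], ABC):
    @abstractmethod
    async def process(self, item: InputT) -> OutputT: ...


class CallbackProcessor(Processor[InputT, OutputT], Generic[InputT, OutputT]):
    @abstractmethod
    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None: ...


class DeferredProcessor(CallbackProcessor[str, str]):
    """Holds items; hands them to the pipeline callback on drain()."""

    def __init__(self) -> None:
        self._callback: Callable[[Any], Awaitable[None]] | None = None
        self._pending: list[str] = []

    def set_done_callback(self, callback: Callable[[Any], Awaitable[None]]) -> None:
        self._callback = callback

    async def process(self, item: str) -> str:
        self._pending.append(item)
        return item

    async def drain(self) -> None:
        if self._callback is not None:
            for item in self._pending:
                await self._callback(item)  # continue through downstream processors
            self._pending.clear()


async def demo() -> None:
    seen: list[str] = []

    async def downstream(item: str) -> None:
        seen.append(item)

    proc = DeferredProcessor()
    proc.set_done_callback(downstream)
    await proc.process("span-1")
    await proc.drain()
    assert seen == ["span-1"]


asyncio.run(demo())
```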
aiq/observability/processor/processor.py
CHANGED
@@ -63,6 +63,9 @@ class Processor(Generic[InputT, OutputT], TypeIntrospectionMixin, ABC):
         """Process an item and return a potentially different type.

         Args:
-            item: The item to process
+            item (InputT): The item to process
+
+        Returns:
+            OutputT: The processed item
         """
         pass
aiq/profiler/calc/calc_runner.py
CHANGED
@@ -34,6 +34,7 @@ from aiq.profiler.calc.data_models import CalcData
 from aiq.profiler.calc.data_models import CalcRunnerConfig
 from aiq.profiler.calc.data_models import CalcRunnerOutput
 from aiq.profiler.calc.data_models import FitConfig
+from aiq.profiler.calc.data_models import FitResults
 from aiq.profiler.calc.data_models import GPUEstimates
 from aiq.profiler.calc.data_models import SizingMetricPerItem
 from aiq.profiler.calc.data_models import SizingMetrics
@@ -408,7 +409,10 @@ class CalcRunner:
         if gpu_estimates.gpu_estimate_by_llm_latency is not None:
             logger.info("GPU estimate by LLM latency: %.2f", gpu_estimates.gpu_estimate_by_llm_latency)

-        return CalcRunnerOutput(gpu_estimates=gpu_estimates,
+        return CalcRunnerOutput(gpu_estimates=gpu_estimates,
+                                calc_data=calc_data,
+                                fit_results=FitResults(llm_latency_fit=self.linear_analyzer.llm_latency_fit,
+                                                       wf_runtime_fit=self.linear_analyzer.wf_runtime_fit))

     def plot_concurrency_vs_time_metrics(self, output_dir: Path):
         """Plots concurrency vs. time metrics using pre-computed fits."""
aiq/profiler/calc/data_models.py
CHANGED
@@ -17,6 +17,7 @@ import typing
 from pathlib import Path

 from pydantic import BaseModel
+from pydantic import Field


 class FitConfig(BaseModel):
@@ -76,7 +77,7 @@ class CalcRunnerConfig(BaseModel):
     plot_data: bool = True

     # Configuration for linear fit and outlier detection
-    fit_config: FitConfig = FitConfig
+    fit_config: FitConfig = Field(default_factory=FitConfig)


 # Sizing metrics are gathered from the evaluation runs and used as input by the calculator.
@@ -103,7 +104,7 @@ class SizingMetrics(BaseModel):
     Sizing metrics for a single concurrency.
     """
     # alerts associated with the sizing metrics
-    alerts: SizingMetricsAlerts = SizingMetricsAlerts
+    alerts: SizingMetricsAlerts = Field(default_factory=SizingMetricsAlerts)

     # p95 LLM latency
     llm_latency_p95: float = 0.0
@@ -125,6 +126,14 @@ class LinearFitResult(BaseModel):
     outliers_removed: list[int]


+class FitResults(BaseModel):
+    """
+    Linear fit results for both LLM latency and workflow runtime analysis.
+    """
+    llm_latency_fit: LinearFitResult | None = None
+    wf_runtime_fit: LinearFitResult | None = None
+
+
 # GPU estimates are generated by the calculator.
 class GPUEstimates(BaseModel):
     """
@@ -158,11 +167,11 @@ class CalcData(BaseModel):
     """
     # ROUGH GPU estimates per concurrency: these are not used for the final GPU estimation
     # they are only available for information purposes
-    gpu_estimates: GPUEstimates = GPUEstimates
+    gpu_estimates: GPUEstimates = Field(default_factory=GPUEstimates)
     # Calc runner alerts
-    alerts: CalcAlerts = CalcAlerts
+    alerts: CalcAlerts = Field(default_factory=CalcAlerts)
     # Sizing metrics
-    sizing_metrics: SizingMetrics = SizingMetrics
+    sizing_metrics: SizingMetrics = Field(default_factory=SizingMetrics)


 class CalcRunnerOutput(BaseModel):
@@ -170,7 +179,10 @@ class CalcRunnerOutput(BaseModel):
     Output of the calc runner.
     """
     # GPU estimates based on the slope of the time vs concurrency, calculated online or offline
-    gpu_estimates: GPUEstimates
+    gpu_estimates: GPUEstimates = Field(default_factory=GPUEstimates)
+
+    # Linear fit results for analysis and debugging
+    fit_results: FitResults = Field(default_factory=FitResults)

     # Per-concurrency data (GPU estimates, out-of-range runs, and sizing metrics)
     calc_data: dict[int, CalcData] = {}
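The `Field(default_factory=...)` changes fix a subtle bug: `alerts: SizingMetricsAlerts = SizingMetricsAlerts` assigns the class object itself as the default (pydantic does not validate defaults unless asked to), so reads got a class where an instance was expected. A short illustration with stand-in models:

```python
from pydantic import BaseModel, Field


class Alerts(BaseModel):
    count: int = 0


class Broken(BaseModel):
    alerts: Alerts = Alerts  # default is the class object, never validated


class Fixed(BaseModel):
    alerts: Alerts = Field(default_factory=Alerts)  # fresh instance per model


assert Broken().alerts is Alerts          # oops: a class, not an Alerts instance
assert Fixed().alerts == Alerts(count=0)  # a real instance, as intended
```

Using `default_factory` also avoids sharing one mutable default instance across all model objects.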
aiq/tool/server_tools.py
CHANGED
@@ -63,4 +63,4 @@ async def current_request_attributes(config: RequestAttributesTool, builder: Builder):
                           f"Conversation Id: {conversation_id}")

     yield FunctionInfo.from_fn(_get_request_attributes,
-                               description="Returns the acquired user defined request
+                               description="Returns the acquired user defined request attributes.")
aiq/utils/type_converter.py
CHANGED
@@ -35,9 +35,12 @@ class TypeConverter:

     def __init__(self, converters: list[Callable[[typing.Any], typing.Any]], parent: "TypeConverter | None" = None):
         """
-
-
-
+        Parameters
+        ----------
+        converters : list[Callable[[typing.Any], typing.Any]]
+            A list of single-argument converter callables annotated with their input param and return type.
+        parent : TypeConverter | None
+            An optional parent TypeConverter for fallback.
         """
         # dict[to_type, dict[from_type, converter]]
         self._converters: OrderedDict[type, OrderedDict[type, Callable]] = OrderedDict()
@@ -54,6 +57,16 @@ class TypeConverter:
         """
         Registers a converter. Must have exactly one parameter
         and an annotated return type.
+
+        Parameters
+        ----------
+        converter : Callable
+            A converter function. Must have exactly one parameter and an annotated return type.
+
+        Raises
+        ------
+        ValueError
+            If the converter does not have a return type or exactly one argument or the argument has no data type.
         """
         sig = typing.get_type_hints(converter)
         to_type = sig.pop("return", None)
@@ -70,7 +83,7 @@ class TypeConverter:
         self._converters.setdefault(to_type, OrderedDict())[from_type] = converter
         # to do(MDD): If needed, sort by specificity here.

-    def _convert(self, data, to_type: type[_T]) -> _T | None:
+    def _convert(self, data: typing.Any, to_type: type[_T]) -> _T | None:
         """
         Attempts to convert `data` into `to_type`. Returns None if no path is found.
         """
@@ -95,10 +108,27 @@ class TypeConverter:
         # 4) If we still haven't succeeded, return None
         return None

-    def convert(self, data, to_type: type[_T]) -> _T:
+    def convert(self, data: typing.Any, to_type: type[_T]) -> _T:
         """
-        Converts or raises ValueError if no path is found.
+        Converts or raises ValueError if no conversion path is found.
         We also give the parent a chance if self fails.
+
+        Parameters
+        ----------
+        data : typing.Any
+            The value to convert.
+        to_type : type
+            The type to convert the value to.
+
+        Returns
+        -------
+        _T
+            The converted value.
+
+        Raises
+        ------
+        ValueError
+            If the value cannot be converted to the specified type.
         """
         result = self._convert(data, to_type)
         if result is None and self._parent:
@@ -109,10 +139,22 @@ class TypeConverter:
             return result
         raise ValueError(f"Cannot convert type {type(data)} to {to_type}. No match found.")

-    def try_convert(self, data, to_type: type[_T]) -> _T:
+    def try_convert(self, data: typing.Any, to_type: type[_T]) -> _T | typing.Any:
         """
         Converts with graceful error handling. If conversion fails, returns the original data
         and continues processing.
+
+        Parameters
+        ----------
+        data : typing.Any
+            The value to convert.
+        to_type : type
+            The type to convert the value to.
+
+        Returns
+        -------
+        _T | typing.Any
+            The converted value, or original value if conversion fails.
         """
         try:
             return self.convert(data, to_type)
@@ -124,7 +166,7 @@ class TypeConverter:
     # -------------------------------------------------
     # INTERNAL DIRECT CONVERSION (with parent fallback)
     # -------------------------------------------------
-    def _try_direct_conversion(self, data, target_root_type: type) -> typing.Any | None:
+    def _try_direct_conversion(self, data: typing.Any, target_root_type: type) -> typing.Any | None:
         """
         Tries direct conversion in *this* converter's registry.
         If no match here, we forward to parent's direct conversion
@@ -149,7 +191,7 @@ class TypeConverter:
     # -------------------------------------------------
     # INTERNAL INDIRECT CONVERSION (with parent fallback)
     # -------------------------------------------------
-    def _try_indirect_convert(self, data, to_type: type[_T]) -> _T | None:
+    def _try_indirect_convert(self, data: typing.Any, to_type: type[_T]) -> _T | None:
         """
         Attempt indirect conversion (DFS) in *this* converter.
         If no success, fallback to parent's indirect attempt.
@@ -234,7 +276,7 @@ class GlobalTypeConverter:
         return GlobalTypeConverter._global_converter.convert(data, to_type)

     @staticmethod
-    def try_convert(data, to_type: type[_T]) -> _T:
+    def try_convert(data: typing.Any, to_type: type[_T]) -> _T | typing.Any:
         return GlobalTypeConverter._global_converter.try_convert(data, to_type)
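The docstrings added here describe annotation-driven registration: `typing.get_type_hints()` yields the converter's return type and its single parameter type, which key a `to_type -> from_type -> converter` registry. A simplified, runnable sketch of that idea (the real TypeConverter also supports parent fallback and indirect, chained conversion):

```python
import typing
from collections import OrderedDict
from collections.abc import Callable


class ConverterRegistry:
    def __init__(self) -> None:
        # dict[to_type, dict[from_type, converter]]
        self._converters: OrderedDict[type, OrderedDict[type, Callable]] = OrderedDict()

    def register_converter(self, converter: Callable) -> None:
        hints = typing.get_type_hints(converter)
        to_type = hints.pop("return", None)
        if to_type is None or len(hints) != 1:
            raise ValueError("Converter needs one annotated parameter and a return type")
        (from_type,) = hints.values()
        self._converters.setdefault(to_type, OrderedDict())[from_type] = converter

    def convert(self, data: typing.Any, to_type: type) -> typing.Any:
        for from_type, fn in self._converters.get(to_type, {}).items():
            if isinstance(data, from_type):
                return fn(data)
        raise ValueError(f"Cannot convert type {type(data)} to {to_type}. No match found.")


def str_to_int(value: str) -> int:
    return int(value)


registry = ConverterRegistry()
registry.register_converter(str_to_int)
assert registry.convert("7", int) == 7
```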
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/RECORD
RENAMED
@@ -44,8 +44,8 @@ aiq/builder/eval_builder.py,sha256=P2yqhPkjvkUi_ZwEsBNJi_qTSmHjraH0_9LXPGnLR7s,6
 aiq/builder/evaluator.py,sha256=O6Gu0cUwQkrPxPX29Vf_-RopgijxPnhy7mhg_j-9A84,1162
 aiq/builder/framework_enum.py,sha256=eYwHQifZ86dx-OTubVA3qhCLRqhB4ElMBYBGA0gYtic,885
 aiq/builder/front_end.py,sha256=Xhvfi4VcDh5EoCtLr6AlLQfbRm8_TyugUc_IRfirN6Y,2225
-aiq/builder/function.py,sha256=
-aiq/builder/function_base.py,sha256=
+aiq/builder/function.py,sha256=ox6wkmSk9ZlwJqAogxCe5rlWr7ZjwlyK2P_iGRJtOlY,13179
+aiq/builder/function_base.py,sha256=xj956YMIySsppijaRkL0GBL0NICtiKqCt4clPFSYJKE,13280
 aiq/builder/function_info.py,sha256=pGPIAL0tjVqLOJymIRB0boI9pzJGdXiPK3KiZvXQsqM,25266
 aiq/builder/intermediate_step_manager.py,sha256=sENuXYQKOwnlEZ7f4lKZ63TTVa6FraT8s_ZM9-vwqa4,7614
 aiq/builder/llm.py,sha256=DcoYCyschsRjkW_yGsa_Ci7ELSpk5KRbi9778Dm_B9c,951
@@ -64,7 +64,7 @@ aiq/cli/cli_utils/validation.py,sha256=GlKpoi3HfE5HELjmz5wk8ezGbb5iZeY0zmA3uxmCr
 aiq/cli/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aiq/cli/commands/evaluate.py,sha256=_pqAuvrNKBf0DvGpZFO28vAKBWp6izMpaLbAVnP57_4,4783
 aiq/cli/commands/start.py,sha256=8mnJYSToQ1XuYuRBRSonuOsjSL1wqSm8nwj3tH9cnAM,10097
-aiq/cli/commands/uninstall.py,sha256=
+aiq/cli/commands/uninstall.py,sha256=D3PGizMGy-c3DLQ-HBcVYPOv0X8eyxFFw27jJW4kxig,3195
 aiq/cli/commands/validate.py,sha256=YfYNRK7m5te_jkKp1klhGp4PdUVCuDyVG4LRW1YXZk0,1669
 aiq/cli/commands/configure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aiq/cli/commands/configure/configure.py,sha256=b_rnfThV161rDmuRO0Jbke-10oSn4RJj2q4IdTvL3ng,1107
@@ -85,7 +85,7 @@ aiq/cli/commands/registry/registry.py,sha256=oo-dPYoKqmGc9lqw3FN9ADg6mGMHwfM14hX
 aiq/cli/commands/registry/remove.py,sha256=ysyfA38NKFZpktjKqDkVK54e9AakpAmBGym2OF6xSuo,3915
 aiq/cli/commands/registry/search.py,sha256=1AhW5Q5GL8SsR6fKgG0rbsd2E0nodm-ehYZwqX2SGgA,5125
 aiq/cli/commands/sizing/__init__.py,sha256=GUJrgGtpvyMUCjUBvR3faAdv-tZzbU9W-izgx9aMEQg,680
-aiq/cli/commands/sizing/calc.py,sha256=
+aiq/cli/commands/sizing/calc.py,sha256=vAcDknEG8tbmfJV7GcsdbPePOvb-Z8fIZUeW5Q5dl2w,11150
 aiq/cli/commands/sizing/sizing.py,sha256=-Hr9mz_ScEMtBbn6ijvmmWVk0WybLeX0Ryi4qhDiYQU,902
 aiq/cli/commands/workflow/__init__.py,sha256=GUJrgGtpvyMUCjUBvR3faAdv-tZzbU9W-izgx9aMEQg,680
 aiq/cli/commands/workflow/workflow.py,sha256=gzd_UXIMGq_NBRZiS_WHh3Dimuw9aTdncREVh8KItJI,1342
@@ -261,9 +261,9 @@ aiq/observability/exporter/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48j
 aiq/observability/exporter/base_exporter.py,sha256=uVr2qssNwJPVJP0P9fD7tNVSdUmj4SWx4GVHqkA1_3s,16639
 aiq/observability/exporter/exporter.py,sha256=QjEtbaTC7LTQpuzdcvRK7gUs6OIlHzQnhzD2pGmXyws,2391
 aiq/observability/exporter/file_exporter.py,sha256=Z1DhUSpTu1SmN282pPRTU20QJZox49jZ75ARsUkgZAk,1496
-aiq/observability/exporter/processing_exporter.py,sha256=
+aiq/observability/exporter/processing_exporter.py,sha256=wLQ7luy5pZD_uVeAI_JDS92MJhyLDSmoOWeSNL_G3uI,14493
 aiq/observability/exporter/raw_exporter.py,sha256=9MFukCkJE7qHBWf2V2rnsCdzL1HRYRarcudveTsvS8w,1862
-aiq/observability/exporter/span_exporter.py,sha256=
+aiq/observability/exporter/span_exporter.py,sha256=AxEAFeEN3oOs-5ljveDWWCYuJDhVeNXzqyIDS5Gx6Rc,12095
 aiq/observability/mixin/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7c,685
 aiq/observability/mixin/batch_config_mixin.py,sha256=DixQq-jRhBFJvpOX-gq7GvPmZCPOXQdacylyEuhZ6y0,1399
 aiq/observability/mixin/collector_config_mixin.py,sha256=3iptkRH9N6JgcsPq7GyjjJVAoxjd-l42UKE7iSF4Hq8,1087
@@ -273,9 +273,10 @@ aiq/observability/mixin/resource_conflict_mixin.py,sha256=mcUp3Qinmhiepq3DyRvp9I
 aiq/observability/mixin/serialize_mixin.py,sha256=DgRHJpXCz9qHFYzhlTTx8Dkj297EylCKK3ydGrH5zOw,2478
 aiq/observability/mixin/type_introspection_mixin.py,sha256=VCb68SY_hitWrWLaK2UHQLkjn2jsgxSn9593T2N3zC0,6637
 aiq/observability/processor/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7c,685
-aiq/observability/processor/batching_processor.py,sha256=
+aiq/observability/processor/batching_processor.py,sha256=dE5E_0oHwISXfCxGXpOJ5fEM-YKIfuPz5h9j9gpJoMQ,13857
+aiq/observability/processor/callback_processor.py,sha256=CTa9FVcoGXaYBRfarF7Ig9jymgmKHNgWRIy-OXtDZ1k,1600
 aiq/observability/processor/intermediate_step_serializer.py,sha256=UQgcHauq568TqVmF_FrInsE5Q_Piryrf_fDnuJbRtAc,1249
-aiq/observability/processor/processor.py,sha256=
+aiq/observability/processor/processor.py,sha256=kfYe1EiQZkOPSnqIwQ6Rjz44j-EpreRAe2uIspmHK9w,2593
 aiq/observability/utils/__init__.py,sha256=Xs1JQ16L9btwreh4pdGKwskffAw1YFO48jKrU4ib_7c,685
 aiq/observability/utils/dict_utils.py,sha256=DcNhZ0mgcJ-QQfsCl9QSGL-m_jTuHhr1N-v43ZCAMik,7371
 aiq/observability/utils/time_utils.py,sha256=V8m-e3ldUgwv031B17y29yLXIowdlTH4QW8xDw9WKvk,1071
@@ -288,9 +289,9 @@ aiq/profiler/intermediate_property_adapter.py,sha256=XZ_A8f2S5M-EJSkErY6I750Y8HA
 aiq/profiler/profile_runner.py,sha256=aNwzcgw9udNh_rgTjtPCScAfvn7ug63LVzW13AvhWRY,22363
 aiq/profiler/utils.py,sha256=hNh_JfxXDrACIp4usXtlriTfVuYUkk3Pv-x74K34MQg,8180
 aiq/profiler/calc/__init__.py,sha256=GUJrgGtpvyMUCjUBvR3faAdv-tZzbU9W-izgx9aMEQg,680
-aiq/profiler/calc/calc_runner.py,sha256=
+aiq/profiler/calc/calc_runner.py,sha256=r06RJi7bWGfI39lmG6qjS-l9ZkWm_r7AbzBRNYuVc9w,30681
 aiq/profiler/calc/calculations.py,sha256=GUDceQOs52QMmHg0u6BVgq_cmFKjXLbuQdNyd7UVZ0A,12327
-aiq/profiler/calc/data_models.py,sha256=
+aiq/profiler/calc/data_models.py,sha256=vmBu89C1LsJyIRpEEzGi29_cFJDZJUJXOSXWtraVfd8,6172
 aiq/profiler/calc/plot.py,sha256=GW1cK8U2w6hIzKp6ItciCmxHR0KeUCw7n5EvpcPsoio,12222
 aiq/profiler/callbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aiq/profiler/callbacks/agno_callback_handler.py,sha256=aDAUY6GDIUtly6KowXXKUqLc7NbE6khg1aXT1AritaA,14930
@@ -368,7 +369,7 @@ aiq/tool/document_search.py,sha256=w3D3r5ZBS2jYmVAZZ7lC7xCoi25bA1RePoFjjlV1Zog,6
 aiq/tool/nvidia_rag.py,sha256=9mS3igONo1RywxXNj_ITh2-qD91x1R0f7uhOWMZQX3o,4178
 aiq/tool/register.py,sha256=Lwl6l_eEzS8LAELxClmniNhhLluRVZFYXhsk2ocQhNg,1491
 aiq/tool/retriever.py,sha256=DnuU4khpJkd4epDBGQsowDOqDBKFiLQrnyKXgU6IRW8,3724
-aiq/tool/server_tools.py,sha256=
+aiq/tool/server_tools.py,sha256=286hTIvX_8pEohmkqmgWTCLkTfMTjuR9v0zivcW17r4,3200
 aiq/tool/code_execution/README.md,sha256=GQy3pPOVspFHQdclQCCweIAY2r8rVZ6bXyCY2FcEc6M,4090
 aiq/tool/code_execution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aiq/tool/code_execution/code_sandbox.py,sha256=ibe6BoYdWzmQWHdVNweVP-QW1WRGirfhvFr0iqm4fI8,10159
@@ -404,7 +405,7 @@ aiq/utils/metadata_utils.py,sha256=lGYvc8Gk0az4qZDGeRbVz4L7B_b-Gnjss8JT4goqL5I,2
 aiq/utils/optional_imports.py,sha256=jQSVBc2fBSRw-2d6r8cEwvh5-di2EUUPakuuo9QbbwA,4039
 aiq/utils/producer_consumer_queue.py,sha256=AcSYkAMBxLx06A5Xdy960PP3AJ7YaSPGJ7rbN_hJsjI,6599
 aiq/utils/string_utils.py,sha256=71HuIzGx7rF8ocTmeoUBpnCi1Qf1yynYlNLLIKP4BVs,1415
-aiq/utils/type_converter.py,sha256=
+aiq/utils/type_converter.py,sha256=mu09lnTOrsYee1gQlc1zLoDL7rJhzN7VNhK6viIS5gA,10640
 aiq/utils/type_utils.py,sha256=G-SYlk4BkJv4L225myUhwBY6-Z7wOgR9K49dVUtV8SA,14896
 aiq/utils/url_utils.py,sha256=UzDP_xaS6brWTu7vAws0B4jZyrITIK9Si3U6pZBZqDE,1028
 aiq/utils/data_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -427,10 +428,10 @@ aiq/utils/reactive/base/observer_base.py,sha256=UAlyAY_ky4q2t0P81RVFo2Bs_R7z5Nde
 aiq/utils/reactive/base/subject_base.py,sha256=Ed-AC6P7cT3qkW1EXjzbd5M9WpVoeN_9KCe3OM3FLU4,2521
 aiq/utils/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 aiq/utils/settings/global_settings.py,sha256=U9TCLdoZsKq5qOVGjREipGVv9e-FlStzqy5zv82_VYk,7454
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
-aiqtoolkit-1.2.
+aiqtoolkit-1.2.0a20250801.dist-info/licenses/LICENSE-3rd-party.txt,sha256=8o7aySJa9CBvFshPcsRdJbczzdNyDGJ8b0J67WRUQ2k,183936
+aiqtoolkit-1.2.0a20250801.dist-info/licenses/LICENSE.md,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+aiqtoolkit-1.2.0a20250801.dist-info/METADATA,sha256=BrsA79HoQ5RbxW4dd9v8o5Ym7CqbsDV8ywi_GSbg9x8,21564
+aiqtoolkit-1.2.0a20250801.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aiqtoolkit-1.2.0a20250801.dist-info/entry_points.txt,sha256=iZR3yrf1liXfbcLqn5_pUkLhZyr1bUw_Qh1d2i7gsv4,625
+aiqtoolkit-1.2.0a20250801.dist-info/top_level.txt,sha256=fo7AzYcNhZ_tRWrhGumtxwnxMew4xrT1iwouDy_f0Kc,4
+aiqtoolkit-1.2.0a20250801.dist-info/RECORD,,
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/WHEEL
RENAMED
File without changes
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/entry_points.txt
RENAMED
File without changes
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/licenses/LICENSE-3rd-party.txt
RENAMED
File without changes
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/licenses/LICENSE.md
RENAMED
File without changes
{aiqtoolkit-1.2.0a20250731.dist-info → aiqtoolkit-1.2.0a20250801.dist-info}/top_level.txt
RENAMED
File without changes