nvidia-nat 1.2.0rc5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/agent/__init__.py +0 -0
- aiq/agent/base.py +239 -0
- aiq/agent/dual_node.py +67 -0
- aiq/agent/react_agent/__init__.py +0 -0
- aiq/agent/react_agent/agent.py +355 -0
- aiq/agent/react_agent/output_parser.py +104 -0
- aiq/agent/react_agent/prompt.py +41 -0
- aiq/agent/react_agent/register.py +149 -0
- aiq/agent/reasoning_agent/__init__.py +0 -0
- aiq/agent/reasoning_agent/reasoning_agent.py +225 -0
- aiq/agent/register.py +23 -0
- aiq/agent/rewoo_agent/__init__.py +0 -0
- aiq/agent/rewoo_agent/agent.py +411 -0
- aiq/agent/rewoo_agent/prompt.py +108 -0
- aiq/agent/rewoo_agent/register.py +158 -0
- aiq/agent/tool_calling_agent/__init__.py +0 -0
- aiq/agent/tool_calling_agent/agent.py +119 -0
- aiq/agent/tool_calling_agent/register.py +106 -0
- aiq/authentication/__init__.py +14 -0
- aiq/authentication/api_key/__init__.py +14 -0
- aiq/authentication/api_key/api_key_auth_provider.py +96 -0
- aiq/authentication/api_key/api_key_auth_provider_config.py +124 -0
- aiq/authentication/api_key/register.py +26 -0
- aiq/authentication/exceptions/__init__.py +14 -0
- aiq/authentication/exceptions/api_key_exceptions.py +38 -0
- aiq/authentication/http_basic_auth/__init__.py +0 -0
- aiq/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
- aiq/authentication/http_basic_auth/register.py +30 -0
- aiq/authentication/interfaces.py +93 -0
- aiq/authentication/oauth2/__init__.py +14 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
- aiq/authentication/oauth2/register.py +25 -0
- aiq/authentication/register.py +21 -0
- aiq/builder/__init__.py +0 -0
- aiq/builder/builder.py +285 -0
- aiq/builder/component_utils.py +316 -0
- aiq/builder/context.py +264 -0
- aiq/builder/embedder.py +24 -0
- aiq/builder/eval_builder.py +161 -0
- aiq/builder/evaluator.py +29 -0
- aiq/builder/framework_enum.py +24 -0
- aiq/builder/front_end.py +73 -0
- aiq/builder/function.py +344 -0
- aiq/builder/function_base.py +380 -0
- aiq/builder/function_info.py +627 -0
- aiq/builder/intermediate_step_manager.py +174 -0
- aiq/builder/llm.py +25 -0
- aiq/builder/retriever.py +25 -0
- aiq/builder/user_interaction_manager.py +74 -0
- aiq/builder/workflow.py +148 -0
- aiq/builder/workflow_builder.py +1117 -0
- aiq/cli/__init__.py +14 -0
- aiq/cli/cli_utils/__init__.py +0 -0
- aiq/cli/cli_utils/config_override.py +231 -0
- aiq/cli/cli_utils/validation.py +37 -0
- aiq/cli/commands/__init__.py +0 -0
- aiq/cli/commands/configure/__init__.py +0 -0
- aiq/cli/commands/configure/channel/__init__.py +0 -0
- aiq/cli/commands/configure/channel/add.py +28 -0
- aiq/cli/commands/configure/channel/channel.py +36 -0
- aiq/cli/commands/configure/channel/remove.py +30 -0
- aiq/cli/commands/configure/channel/update.py +30 -0
- aiq/cli/commands/configure/configure.py +33 -0
- aiq/cli/commands/evaluate.py +139 -0
- aiq/cli/commands/info/__init__.py +14 -0
- aiq/cli/commands/info/info.py +39 -0
- aiq/cli/commands/info/list_channels.py +32 -0
- aiq/cli/commands/info/list_components.py +129 -0
- aiq/cli/commands/info/list_mcp.py +213 -0
- aiq/cli/commands/registry/__init__.py +14 -0
- aiq/cli/commands/registry/publish.py +88 -0
- aiq/cli/commands/registry/pull.py +118 -0
- aiq/cli/commands/registry/registry.py +38 -0
- aiq/cli/commands/registry/remove.py +108 -0
- aiq/cli/commands/registry/search.py +155 -0
- aiq/cli/commands/sizing/__init__.py +14 -0
- aiq/cli/commands/sizing/calc.py +297 -0
- aiq/cli/commands/sizing/sizing.py +27 -0
- aiq/cli/commands/start.py +246 -0
- aiq/cli/commands/uninstall.py +81 -0
- aiq/cli/commands/validate.py +47 -0
- aiq/cli/commands/workflow/__init__.py +14 -0
- aiq/cli/commands/workflow/templates/__init__.py.j2 +0 -0
- aiq/cli/commands/workflow/templates/config.yml.j2 +16 -0
- aiq/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
- aiq/cli/commands/workflow/templates/register.py.j2 +5 -0
- aiq/cli/commands/workflow/templates/workflow.py.j2 +36 -0
- aiq/cli/commands/workflow/workflow.py +37 -0
- aiq/cli/commands/workflow/workflow_commands.py +313 -0
- aiq/cli/entrypoint.py +135 -0
- aiq/cli/main.py +44 -0
- aiq/cli/register_workflow.py +488 -0
- aiq/cli/type_registry.py +1000 -0
- aiq/data_models/__init__.py +14 -0
- aiq/data_models/api_server.py +694 -0
- aiq/data_models/authentication.py +231 -0
- aiq/data_models/common.py +171 -0
- aiq/data_models/component.py +54 -0
- aiq/data_models/component_ref.py +168 -0
- aiq/data_models/config.py +406 -0
- aiq/data_models/dataset_handler.py +123 -0
- aiq/data_models/discovery_metadata.py +335 -0
- aiq/data_models/embedder.py +27 -0
- aiq/data_models/evaluate.py +127 -0
- aiq/data_models/evaluator.py +26 -0
- aiq/data_models/front_end.py +26 -0
- aiq/data_models/function.py +30 -0
- aiq/data_models/function_dependencies.py +72 -0
- aiq/data_models/interactive.py +246 -0
- aiq/data_models/intermediate_step.py +302 -0
- aiq/data_models/invocation_node.py +38 -0
- aiq/data_models/llm.py +27 -0
- aiq/data_models/logging.py +26 -0
- aiq/data_models/memory.py +27 -0
- aiq/data_models/object_store.py +44 -0
- aiq/data_models/profiler.py +54 -0
- aiq/data_models/registry_handler.py +26 -0
- aiq/data_models/retriever.py +30 -0
- aiq/data_models/retry_mixin.py +35 -0
- aiq/data_models/span.py +187 -0
- aiq/data_models/step_adaptor.py +64 -0
- aiq/data_models/streaming.py +33 -0
- aiq/data_models/swe_bench_model.py +54 -0
- aiq/data_models/telemetry_exporter.py +26 -0
- aiq/data_models/ttc_strategy.py +30 -0
- aiq/embedder/__init__.py +0 -0
- aiq/embedder/langchain_client.py +41 -0
- aiq/embedder/nim_embedder.py +59 -0
- aiq/embedder/openai_embedder.py +43 -0
- aiq/embedder/register.py +24 -0
- aiq/eval/__init__.py +14 -0
- aiq/eval/config.py +60 -0
- aiq/eval/dataset_handler/__init__.py +0 -0
- aiq/eval/dataset_handler/dataset_downloader.py +106 -0
- aiq/eval/dataset_handler/dataset_filter.py +52 -0
- aiq/eval/dataset_handler/dataset_handler.py +254 -0
- aiq/eval/evaluate.py +506 -0
- aiq/eval/evaluator/__init__.py +14 -0
- aiq/eval/evaluator/base_evaluator.py +73 -0
- aiq/eval/evaluator/evaluator_model.py +45 -0
- aiq/eval/intermediate_step_adapter.py +99 -0
- aiq/eval/rag_evaluator/__init__.py +0 -0
- aiq/eval/rag_evaluator/evaluate.py +178 -0
- aiq/eval/rag_evaluator/register.py +143 -0
- aiq/eval/register.py +23 -0
- aiq/eval/remote_workflow.py +133 -0
- aiq/eval/runners/__init__.py +14 -0
- aiq/eval/runners/config.py +39 -0
- aiq/eval/runners/multi_eval_runner.py +54 -0
- aiq/eval/runtime_event_subscriber.py +52 -0
- aiq/eval/swe_bench_evaluator/__init__.py +0 -0
- aiq/eval/swe_bench_evaluator/evaluate.py +215 -0
- aiq/eval/swe_bench_evaluator/register.py +36 -0
- aiq/eval/trajectory_evaluator/__init__.py +0 -0
- aiq/eval/trajectory_evaluator/evaluate.py +75 -0
- aiq/eval/trajectory_evaluator/register.py +40 -0
- aiq/eval/tunable_rag_evaluator/__init__.py +0 -0
- aiq/eval/tunable_rag_evaluator/evaluate.py +245 -0
- aiq/eval/tunable_rag_evaluator/register.py +52 -0
- aiq/eval/usage_stats.py +41 -0
- aiq/eval/utils/__init__.py +0 -0
- aiq/eval/utils/output_uploader.py +140 -0
- aiq/eval/utils/tqdm_position_registry.py +40 -0
- aiq/eval/utils/weave_eval.py +184 -0
- aiq/experimental/__init__.py +0 -0
- aiq/experimental/decorators/__init__.py +0 -0
- aiq/experimental/decorators/experimental_warning_decorator.py +130 -0
- aiq/experimental/test_time_compute/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
- aiq/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
- aiq/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
- aiq/experimental/test_time_compute/functions/__init__.py +0 -0
- aiq/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
- aiq/experimental/test_time_compute/functions/its_tool_orchestration_function.py +205 -0
- aiq/experimental/test_time_compute/functions/its_tool_wrapper_function.py +146 -0
- aiq/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
- aiq/experimental/test_time_compute/models/__init__.py +0 -0
- aiq/experimental/test_time_compute/models/editor_config.py +132 -0
- aiq/experimental/test_time_compute/models/scoring_config.py +112 -0
- aiq/experimental/test_time_compute/models/search_config.py +120 -0
- aiq/experimental/test_time_compute/models/selection_config.py +154 -0
- aiq/experimental/test_time_compute/models/stage_enums.py +43 -0
- aiq/experimental/test_time_compute/models/strategy_base.py +66 -0
- aiq/experimental/test_time_compute/models/tool_use_config.py +41 -0
- aiq/experimental/test_time_compute/models/ttc_item.py +48 -0
- aiq/experimental/test_time_compute/register.py +36 -0
- aiq/experimental/test_time_compute/scoring/__init__.py +0 -0
- aiq/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
- aiq/experimental/test_time_compute/search/__init__.py +0 -0
- aiq/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
- aiq/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
- aiq/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
- aiq/experimental/test_time_compute/selection/__init__.py +0 -0
- aiq/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
- aiq/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
- aiq/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
- aiq/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
- aiq/experimental/test_time_compute/selection/threshold_selector.py +58 -0
- aiq/front_ends/__init__.py +14 -0
- aiq/front_ends/console/__init__.py +14 -0
- aiq/front_ends/console/authentication_flow_handler.py +233 -0
- aiq/front_ends/console/console_front_end_config.py +32 -0
- aiq/front_ends/console/console_front_end_plugin.py +96 -0
- aiq/front_ends/console/register.py +25 -0
- aiq/front_ends/cron/__init__.py +14 -0
- aiq/front_ends/fastapi/__init__.py +14 -0
- aiq/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
- aiq/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
- aiq/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
- aiq/front_ends/fastapi/fastapi_front_end_config.py +234 -0
- aiq/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1092 -0
- aiq/front_ends/fastapi/html_snippets/__init__.py +14 -0
- aiq/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
- aiq/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
- aiq/front_ends/fastapi/job_store.py +183 -0
- aiq/front_ends/fastapi/main.py +72 -0
- aiq/front_ends/fastapi/message_handler.py +298 -0
- aiq/front_ends/fastapi/message_validator.py +345 -0
- aiq/front_ends/fastapi/register.py +25 -0
- aiq/front_ends/fastapi/response_helpers.py +195 -0
- aiq/front_ends/fastapi/step_adaptor.py +321 -0
- aiq/front_ends/mcp/__init__.py +14 -0
- aiq/front_ends/mcp/mcp_front_end_config.py +32 -0
- aiq/front_ends/mcp/mcp_front_end_plugin.py +93 -0
- aiq/front_ends/mcp/register.py +27 -0
- aiq/front_ends/mcp/tool_converter.py +242 -0
- aiq/front_ends/register.py +22 -0
- aiq/front_ends/simple_base/__init__.py +14 -0
- aiq/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
- aiq/llm/__init__.py +0 -0
- aiq/llm/aws_bedrock_llm.py +57 -0
- aiq/llm/nim_llm.py +46 -0
- aiq/llm/openai_llm.py +46 -0
- aiq/llm/register.py +23 -0
- aiq/llm/utils/__init__.py +14 -0
- aiq/llm/utils/env_config_value.py +94 -0
- aiq/llm/utils/error.py +17 -0
- aiq/memory/__init__.py +20 -0
- aiq/memory/interfaces.py +183 -0
- aiq/memory/models.py +112 -0
- aiq/meta/module_to_distro.json +3 -0
- aiq/meta/pypi.md +58 -0
- aiq/object_store/__init__.py +20 -0
- aiq/object_store/in_memory_object_store.py +76 -0
- aiq/object_store/interfaces.py +84 -0
- aiq/object_store/models.py +36 -0
- aiq/object_store/register.py +20 -0
- aiq/observability/__init__.py +14 -0
- aiq/observability/exporter/__init__.py +14 -0
- aiq/observability/exporter/base_exporter.py +449 -0
- aiq/observability/exporter/exporter.py +78 -0
- aiq/observability/exporter/file_exporter.py +33 -0
- aiq/observability/exporter/processing_exporter.py +322 -0
- aiq/observability/exporter/raw_exporter.py +52 -0
- aiq/observability/exporter/span_exporter.py +265 -0
- aiq/observability/exporter_manager.py +335 -0
- aiq/observability/mixin/__init__.py +14 -0
- aiq/observability/mixin/batch_config_mixin.py +26 -0
- aiq/observability/mixin/collector_config_mixin.py +23 -0
- aiq/observability/mixin/file_mixin.py +288 -0
- aiq/observability/mixin/file_mode.py +23 -0
- aiq/observability/mixin/resource_conflict_mixin.py +134 -0
- aiq/observability/mixin/serialize_mixin.py +61 -0
- aiq/observability/mixin/type_introspection_mixin.py +183 -0
- aiq/observability/processor/__init__.py +14 -0
- aiq/observability/processor/batching_processor.py +310 -0
- aiq/observability/processor/callback_processor.py +42 -0
- aiq/observability/processor/intermediate_step_serializer.py +28 -0
- aiq/observability/processor/processor.py +71 -0
- aiq/observability/register.py +96 -0
- aiq/observability/utils/__init__.py +14 -0
- aiq/observability/utils/dict_utils.py +236 -0
- aiq/observability/utils/time_utils.py +31 -0
- aiq/plugins/.namespace +1 -0
- aiq/profiler/__init__.py +0 -0
- aiq/profiler/calc/__init__.py +14 -0
- aiq/profiler/calc/calc_runner.py +627 -0
- aiq/profiler/calc/calculations.py +288 -0
- aiq/profiler/calc/data_models.py +188 -0
- aiq/profiler/calc/plot.py +345 -0
- aiq/profiler/callbacks/__init__.py +0 -0
- aiq/profiler/callbacks/agno_callback_handler.py +295 -0
- aiq/profiler/callbacks/base_callback_class.py +20 -0
- aiq/profiler/callbacks/langchain_callback_handler.py +290 -0
- aiq/profiler/callbacks/llama_index_callback_handler.py +205 -0
- aiq/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
- aiq/profiler/callbacks/token_usage_base_model.py +27 -0
- aiq/profiler/data_frame_row.py +51 -0
- aiq/profiler/data_models.py +24 -0
- aiq/profiler/decorators/__init__.py +0 -0
- aiq/profiler/decorators/framework_wrapper.py +131 -0
- aiq/profiler/decorators/function_tracking.py +254 -0
- aiq/profiler/forecasting/__init__.py +0 -0
- aiq/profiler/forecasting/config.py +18 -0
- aiq/profiler/forecasting/model_trainer.py +75 -0
- aiq/profiler/forecasting/models/__init__.py +22 -0
- aiq/profiler/forecasting/models/forecasting_base_model.py +40 -0
- aiq/profiler/forecasting/models/linear_model.py +196 -0
- aiq/profiler/forecasting/models/random_forest_regressor.py +268 -0
- aiq/profiler/inference_metrics_model.py +28 -0
- aiq/profiler/inference_optimization/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
- aiq/profiler/inference_optimization/data_models.py +386 -0
- aiq/profiler/inference_optimization/experimental/__init__.py +0 -0
- aiq/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
- aiq/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
- aiq/profiler/inference_optimization/llm_metrics.py +212 -0
- aiq/profiler/inference_optimization/prompt_caching.py +163 -0
- aiq/profiler/inference_optimization/token_uniqueness.py +107 -0
- aiq/profiler/inference_optimization/workflow_runtimes.py +72 -0
- aiq/profiler/intermediate_property_adapter.py +102 -0
- aiq/profiler/profile_runner.py +473 -0
- aiq/profiler/utils.py +184 -0
- aiq/registry_handlers/__init__.py +0 -0
- aiq/registry_handlers/local/__init__.py +0 -0
- aiq/registry_handlers/local/local_handler.py +176 -0
- aiq/registry_handlers/local/register_local.py +37 -0
- aiq/registry_handlers/metadata_factory.py +60 -0
- aiq/registry_handlers/package_utils.py +567 -0
- aiq/registry_handlers/pypi/__init__.py +0 -0
- aiq/registry_handlers/pypi/pypi_handler.py +251 -0
- aiq/registry_handlers/pypi/register_pypi.py +40 -0
- aiq/registry_handlers/register.py +21 -0
- aiq/registry_handlers/registry_handler_base.py +157 -0
- aiq/registry_handlers/rest/__init__.py +0 -0
- aiq/registry_handlers/rest/register_rest.py +56 -0
- aiq/registry_handlers/rest/rest_handler.py +237 -0
- aiq/registry_handlers/schemas/__init__.py +0 -0
- aiq/registry_handlers/schemas/headers.py +42 -0
- aiq/registry_handlers/schemas/package.py +68 -0
- aiq/registry_handlers/schemas/publish.py +63 -0
- aiq/registry_handlers/schemas/pull.py +82 -0
- aiq/registry_handlers/schemas/remove.py +36 -0
- aiq/registry_handlers/schemas/search.py +91 -0
- aiq/registry_handlers/schemas/status.py +47 -0
- aiq/retriever/__init__.py +0 -0
- aiq/retriever/interface.py +37 -0
- aiq/retriever/milvus/__init__.py +14 -0
- aiq/retriever/milvus/register.py +81 -0
- aiq/retriever/milvus/retriever.py +228 -0
- aiq/retriever/models.py +74 -0
- aiq/retriever/nemo_retriever/__init__.py +14 -0
- aiq/retriever/nemo_retriever/register.py +60 -0
- aiq/retriever/nemo_retriever/retriever.py +190 -0
- aiq/retriever/register.py +22 -0
- aiq/runtime/__init__.py +14 -0
- aiq/runtime/loader.py +215 -0
- aiq/runtime/runner.py +190 -0
- aiq/runtime/session.py +158 -0
- aiq/runtime/user_metadata.py +130 -0
- aiq/settings/__init__.py +0 -0
- aiq/settings/global_settings.py +318 -0
- aiq/test/.namespace +1 -0
- aiq/tool/__init__.py +0 -0
- aiq/tool/chat_completion.py +74 -0
- aiq/tool/code_execution/README.md +151 -0
- aiq/tool/code_execution/__init__.py +0 -0
- aiq/tool/code_execution/code_sandbox.py +267 -0
- aiq/tool/code_execution/local_sandbox/.gitignore +1 -0
- aiq/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
- aiq/tool/code_execution/local_sandbox/__init__.py +13 -0
- aiq/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
- aiq/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
- aiq/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
- aiq/tool/code_execution/register.py +74 -0
- aiq/tool/code_execution/test_code_execution_sandbox.py +414 -0
- aiq/tool/code_execution/utils.py +100 -0
- aiq/tool/datetime_tools.py +42 -0
- aiq/tool/document_search.py +141 -0
- aiq/tool/github_tools/__init__.py +0 -0
- aiq/tool/github_tools/create_github_commit.py +133 -0
- aiq/tool/github_tools/create_github_issue.py +87 -0
- aiq/tool/github_tools/create_github_pr.py +106 -0
- aiq/tool/github_tools/get_github_file.py +106 -0
- aiq/tool/github_tools/get_github_issue.py +166 -0
- aiq/tool/github_tools/get_github_pr.py +256 -0
- aiq/tool/github_tools/update_github_issue.py +100 -0
- aiq/tool/mcp/__init__.py +14 -0
- aiq/tool/mcp/exceptions.py +142 -0
- aiq/tool/mcp/mcp_client.py +255 -0
- aiq/tool/mcp/mcp_tool.py +96 -0
- aiq/tool/memory_tools/__init__.py +0 -0
- aiq/tool/memory_tools/add_memory_tool.py +79 -0
- aiq/tool/memory_tools/delete_memory_tool.py +67 -0
- aiq/tool/memory_tools/get_memory_tool.py +72 -0
- aiq/tool/nvidia_rag.py +95 -0
- aiq/tool/register.py +38 -0
- aiq/tool/retriever.py +89 -0
- aiq/tool/server_tools.py +66 -0
- aiq/utils/__init__.py +0 -0
- aiq/utils/data_models/__init__.py +0 -0
- aiq/utils/data_models/schema_validator.py +58 -0
- aiq/utils/debugging_utils.py +43 -0
- aiq/utils/dump_distro_mapping.py +32 -0
- aiq/utils/exception_handlers/__init__.py +0 -0
- aiq/utils/exception_handlers/automatic_retries.py +289 -0
- aiq/utils/exception_handlers/mcp.py +211 -0
- aiq/utils/exception_handlers/schemas.py +114 -0
- aiq/utils/io/__init__.py +0 -0
- aiq/utils/io/model_processing.py +28 -0
- aiq/utils/io/yaml_tools.py +119 -0
- aiq/utils/log_utils.py +37 -0
- aiq/utils/metadata_utils.py +74 -0
- aiq/utils/optional_imports.py +142 -0
- aiq/utils/producer_consumer_queue.py +178 -0
- aiq/utils/reactive/__init__.py +0 -0
- aiq/utils/reactive/base/__init__.py +0 -0
- aiq/utils/reactive/base/observable_base.py +65 -0
- aiq/utils/reactive/base/observer_base.py +55 -0
- aiq/utils/reactive/base/subject_base.py +79 -0
- aiq/utils/reactive/observable.py +59 -0
- aiq/utils/reactive/observer.py +76 -0
- aiq/utils/reactive/subject.py +131 -0
- aiq/utils/reactive/subscription.py +49 -0
- aiq/utils/settings/__init__.py +0 -0
- aiq/utils/settings/global_settings.py +197 -0
- aiq/utils/string_utils.py +38 -0
- aiq/utils/type_converter.py +290 -0
- aiq/utils/type_utils.py +484 -0
- aiq/utils/url_utils.py +27 -0
- nvidia_nat-1.2.0rc5.dist-info/METADATA +363 -0
- nvidia_nat-1.2.0rc5.dist-info/RECORD +435 -0
- nvidia_nat-1.2.0rc5.dist-info/WHEEL +5 -0
- nvidia_nat-1.2.0rc5.dist-info/entry_points.txt +20 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE-3rd-party.txt +3686 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE.md +201 -0
- nvidia_nat-1.2.0rc5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import copy
|
|
19
|
+
import logging
|
|
20
|
+
import threading
|
|
21
|
+
import time
|
|
22
|
+
from typing import Any
|
|
23
|
+
from uuid import UUID
|
|
24
|
+
from uuid import uuid4
|
|
25
|
+
|
|
26
|
+
from langchain_core.callbacks import AsyncCallbackHandler
|
|
27
|
+
from langchain_core.messages import AIMessage
|
|
28
|
+
from langchain_core.messages import BaseMessage
|
|
29
|
+
from langchain_core.outputs import ChatGeneration
|
|
30
|
+
from langchain_core.outputs import LLMResult
|
|
31
|
+
|
|
32
|
+
from aiq.builder.context import AIQContext
|
|
33
|
+
from aiq.builder.framework_enum import LLMFrameworkEnum
|
|
34
|
+
from aiq.data_models.intermediate_step import IntermediateStepPayload
|
|
35
|
+
from aiq.data_models.intermediate_step import IntermediateStepType
|
|
36
|
+
from aiq.data_models.intermediate_step import StreamEventData
|
|
37
|
+
from aiq.data_models.intermediate_step import ToolSchema
|
|
38
|
+
from aiq.data_models.intermediate_step import TraceMetadata
|
|
39
|
+
from aiq.data_models.intermediate_step import UsageInfo
|
|
40
|
+
from aiq.profiler.callbacks.base_callback_class import BaseProfilerCallback
|
|
41
|
+
from aiq.profiler.callbacks.token_usage_base_model import TokenUsageBaseModel
|
|
42
|
+
|
|
43
|
+
logger = logging.getLogger(__name__)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _extract_tools_schema(invocation_params: dict) -> list:
    """Build ``ToolSchema`` objects from the ``tools`` entry of *invocation_params*.

    Args:
        invocation_params: The invocation parameters passed to the LLM call;
            may be ``None`` or lack a ``"tools"`` key entirely.

    Returns:
        A list of ``ToolSchema`` instances, empty when *invocation_params* is
        ``None`` or advertises no tools.
    """
    if invocation_params is None:
        return []
    # Each entry under "tools" is a mapping whose keys match ToolSchema's fields.
    return [ToolSchema(**tool_spec) for tool_spec in invocation_params.get("tools", [])]
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class LangchainProfilerHandler(AsyncCallbackHandler, BaseProfilerCallback): # pylint: disable=R0901
|
|
57
|
+
"""Callback Handler that tracks NIM info."""
|
|
58
|
+
|
|
59
|
+
total_tokens: int = 0
|
|
60
|
+
prompt_tokens: int = 0
|
|
61
|
+
completion_tokens: int = 0
|
|
62
|
+
successful_requests: int = 0
|
|
63
|
+
raise_error = True # Override to raise error and run inline
|
|
64
|
+
run_inline = True
|
|
65
|
+
|
|
66
|
+
def __init__(self) -> None:
|
|
67
|
+
super().__init__()
|
|
68
|
+
self._lock = threading.Lock()
|
|
69
|
+
self.last_call_ts = time.time()
|
|
70
|
+
|
|
71
|
+
self.step_manager = AIQContext.get().intermediate_step_manager
|
|
72
|
+
self._state = IntermediateStepType.LLM_END
|
|
73
|
+
|
|
74
|
+
self._run_id_to_model_name = {}
|
|
75
|
+
self._run_id_to_llm_input = {}
|
|
76
|
+
self._run_id_to_tool_input = {}
|
|
77
|
+
self._run_id_to_start_time = {}
|
|
78
|
+
|
|
79
|
+
def __repr__(self) -> str:
|
|
80
|
+
return (f"Tokens Used: {self.total_tokens}\n"
|
|
81
|
+
f"\tPrompt Tokens: {self.prompt_tokens}\n"
|
|
82
|
+
f"\tCompletion Tokens: {self.completion_tokens}\n"
|
|
83
|
+
f"Successful Requests: {self.successful_requests}\n")
|
|
84
|
+
|
|
85
|
+
@property
|
|
86
|
+
def always_verbose(self) -> bool:
|
|
87
|
+
"""Whether to call verbose callbacks even if verbose is False."""
|
|
88
|
+
return True
|
|
89
|
+
|
|
90
|
+
def _extract_token_base_model(self, usage_metadata: dict[str, Any]) -> TokenUsageBaseModel:
|
|
91
|
+
if usage_metadata:
|
|
92
|
+
prompt_tokens = usage_metadata.get("input_tokens", 0)
|
|
93
|
+
completion_tokens = usage_metadata.get("output_tokens", 0)
|
|
94
|
+
total_tokens = usage_metadata.get("total_tokens", 0)
|
|
95
|
+
|
|
96
|
+
return TokenUsageBaseModel(
|
|
97
|
+
prompt_tokens=prompt_tokens,
|
|
98
|
+
completion_tokens=completion_tokens,
|
|
99
|
+
total_tokens=total_tokens,
|
|
100
|
+
)
|
|
101
|
+
return TokenUsageBaseModel()
|
|
102
|
+
|
|
103
|
+
async def on_llm_start(self, serialized: dict[str, Any], prompts: list[str], **kwargs: Any) -> None:
|
|
104
|
+
|
|
105
|
+
model_name = ""
|
|
106
|
+
try:
|
|
107
|
+
model_name = kwargs.get("metadata")["ls_model_name"]
|
|
108
|
+
except Exception as e:
|
|
109
|
+
logger.exception("Error getting model name: %s", e, exc_info=True)
|
|
110
|
+
|
|
111
|
+
run_id = str(kwargs.get("run_id", str(uuid4())))
|
|
112
|
+
self._run_id_to_model_name[run_id] = model_name
|
|
113
|
+
|
|
114
|
+
stats = IntermediateStepPayload(event_type=IntermediateStepType.LLM_START,
|
|
115
|
+
framework=LLMFrameworkEnum.LANGCHAIN,
|
|
116
|
+
name=model_name,
|
|
117
|
+
UUID=run_id,
|
|
118
|
+
data=StreamEventData(input=prompts[-1]),
|
|
119
|
+
metadata=TraceMetadata(chat_inputs=copy.deepcopy(prompts)),
|
|
120
|
+
usage_info=UsageInfo(token_usage=TokenUsageBaseModel(),
|
|
121
|
+
num_llm_calls=1,
|
|
122
|
+
seconds_between_calls=int(time.time() -
|
|
123
|
+
self.last_call_ts)))
|
|
124
|
+
|
|
125
|
+
self.step_manager.push_intermediate_step(stats)
|
|
126
|
+
self._run_id_to_llm_input[run_id] = prompts[-1]
|
|
127
|
+
self._state = IntermediateStepType.LLM_START
|
|
128
|
+
self.last_call_ts = time.time()
|
|
129
|
+
self._run_id_to_start_time[run_id] = time.time()
|
|
130
|
+
|
|
131
|
+
async def on_chat_model_start(
|
|
132
|
+
self,
|
|
133
|
+
serialized: dict[str, Any],
|
|
134
|
+
messages: list[list[BaseMessage]],
|
|
135
|
+
*,
|
|
136
|
+
run_id: UUID,
|
|
137
|
+
parent_run_id: UUID | None = None,
|
|
138
|
+
tags: list[str] | None = None,
|
|
139
|
+
metadata: dict[str, Any] | None = None,
|
|
140
|
+
**kwargs: Any,
|
|
141
|
+
) -> Any:
|
|
142
|
+
|
|
143
|
+
model_name = ""
|
|
144
|
+
try:
|
|
145
|
+
model_name = metadata["ls_model_name"] if metadata else kwargs.get("metadata")["ls_model_name"]
|
|
146
|
+
except Exception as e:
|
|
147
|
+
logger.exception("Error getting model name: %s", e, exc_info=True)
|
|
148
|
+
|
|
149
|
+
run_id = str(run_id)
|
|
150
|
+
self._run_id_to_model_name[run_id] = model_name
|
|
151
|
+
|
|
152
|
+
stats = IntermediateStepPayload(
|
|
153
|
+
event_type=IntermediateStepType.LLM_START,
|
|
154
|
+
framework=LLMFrameworkEnum.LANGCHAIN,
|
|
155
|
+
name=model_name,
|
|
156
|
+
UUID=run_id,
|
|
157
|
+
data=StreamEventData(input=copy.deepcopy(messages[0])),
|
|
158
|
+
metadata=TraceMetadata(chat_inputs=copy.deepcopy(messages[0]),
|
|
159
|
+
tools_schema=_extract_tools_schema(kwargs.get("invocation_params", {}))),
|
|
160
|
+
usage_info=UsageInfo(token_usage=TokenUsageBaseModel(),
|
|
161
|
+
num_llm_calls=1,
|
|
162
|
+
seconds_between_calls=int(time.time() - self.last_call_ts)))
|
|
163
|
+
|
|
164
|
+
self.step_manager.push_intermediate_step(stats)
|
|
165
|
+
self._run_id_to_llm_input[run_id] = messages[0][-1].content
|
|
166
|
+
self._state = IntermediateStepType.LLM_START
|
|
167
|
+
self.last_call_ts = time.time()
|
|
168
|
+
self._run_id_to_start_time[run_id] = time.time()
|
|
169
|
+
|
|
170
|
+
async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
|
|
171
|
+
"""Collect stats for just the token"""
|
|
172
|
+
model_name = ""
|
|
173
|
+
try:
|
|
174
|
+
model_name = self._run_id_to_model_name.get(str(kwargs.get("run_id", "")), "")
|
|
175
|
+
except Exception as e:
|
|
176
|
+
logger.exception("Error getting model name: %s", e, exc_info=True)
|
|
177
|
+
|
|
178
|
+
usage_metadata = {}
|
|
179
|
+
try:
|
|
180
|
+
usage_metadata = kwargs.get("chunk").message.usage_metadata if kwargs.get("chunk") else {}
|
|
181
|
+
except Exception as e:
|
|
182
|
+
logger.exception("Error getting usage metadata: %s", e, exc_info=True)
|
|
183
|
+
|
|
184
|
+
stats = IntermediateStepPayload(
|
|
185
|
+
event_type=IntermediateStepType.LLM_NEW_TOKEN,
|
|
186
|
+
framework=LLMFrameworkEnum.LANGCHAIN,
|
|
187
|
+
name=model_name,
|
|
188
|
+
UUID=str(kwargs.get("run_id", str(uuid4()))),
|
|
189
|
+
data=StreamEventData(input=self._run_id_to_llm_input.get(str(kwargs.get("run_id", "")), ""), chunk=token),
|
|
190
|
+
usage_info=UsageInfo(token_usage=self._extract_token_base_model(usage_metadata),
|
|
191
|
+
num_llm_calls=1,
|
|
192
|
+
seconds_between_calls=int(time.time() - self.last_call_ts)),
|
|
193
|
+
metadata=TraceMetadata(chat_responses=[kwargs.get("chunk")] if kwargs.get("chunk") else []))
|
|
194
|
+
|
|
195
|
+
self.step_manager.push_intermediate_step(stats)
|
|
196
|
+
|
|
197
|
+
async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
|
|
198
|
+
"""Collect token usage."""
|
|
199
|
+
|
|
200
|
+
usage_metadata = {}
|
|
201
|
+
|
|
202
|
+
model_name = ""
|
|
203
|
+
try:
|
|
204
|
+
model_name = response.llm_output["model_name"]
|
|
205
|
+
except Exception as e:
|
|
206
|
+
try:
|
|
207
|
+
model_name = self._run_id_to_model_name.get(str(kwargs.get("run_id", "")), "")
|
|
208
|
+
except Exception as e_inner:
|
|
209
|
+
logger.exception("Error getting model name: %s from outer error %s", e_inner, e, exc_info=True)
|
|
210
|
+
|
|
211
|
+
try:
|
|
212
|
+
generation = response.generations[0][0]
|
|
213
|
+
except IndexError:
|
|
214
|
+
generation = None
|
|
215
|
+
|
|
216
|
+
if isinstance(generation, ChatGeneration):
|
|
217
|
+
try:
|
|
218
|
+
message = generation.message
|
|
219
|
+
if isinstance(message, AIMessage):
|
|
220
|
+
usage_metadata = message.usage_metadata
|
|
221
|
+
else:
|
|
222
|
+
usage_metadata = {}
|
|
223
|
+
except AttributeError:
|
|
224
|
+
usage_metadata = {}
|
|
225
|
+
|
|
226
|
+
llm_text_output = generation.message.content if generation else ""
|
|
227
|
+
|
|
228
|
+
# update shared state behind lock
|
|
229
|
+
with self._lock:
|
|
230
|
+
usage_stat = IntermediateStepPayload(
|
|
231
|
+
span_event_timestamp=self._run_id_to_start_time.get(str(kwargs.get("run_id", "")), time.time()),
|
|
232
|
+
event_type=IntermediateStepType.LLM_END,
|
|
233
|
+
framework=LLMFrameworkEnum.LANGCHAIN,
|
|
234
|
+
name=model_name,
|
|
235
|
+
UUID=str(kwargs.get("run_id", str(uuid4()))),
|
|
236
|
+
data=StreamEventData(input=self._run_id_to_llm_input.get(str(kwargs.get("run_id", "")), ""),
|
|
237
|
+
output=llm_text_output),
|
|
238
|
+
usage_info=UsageInfo(token_usage=self._extract_token_base_model(usage_metadata)),
|
|
239
|
+
metadata=TraceMetadata(chat_responses=[generation] if generation else []))
|
|
240
|
+
|
|
241
|
+
self.step_manager.push_intermediate_step(usage_stat)
|
|
242
|
+
|
|
243
|
+
self._state = IntermediateStepType.LLM_END
|
|
244
|
+
|
|
245
|
+
async def on_tool_start(
    self,
    serialized: dict[str, Any],
    input_str: str,
    *,
    run_id: UUID,
    parent_run_id: UUID | None = None,
    tags: list[str] | None = None,
    metadata: dict[str, Any] | None = None,
    inputs: dict[str, Any] | None = None,
    **kwargs: Any,
) -> Any:
    """Emit a TOOL_START intermediate step and record the tool's input/start time.

    The input string and start timestamp are stashed per run_id so that
    on_tool_end can reconstruct the complete span later.
    """
    run_key = str(run_id)

    start_payload = IntermediateStepPayload(
        event_type=IntermediateStepType.TOOL_START,
        framework=LLMFrameworkEnum.LANGCHAIN,
        name=serialized.get("name", ""),
        UUID=run_key,
        data=StreamEventData(input=input_str),
        # Deep-copy mutable structures so later mutation by the framework
        # cannot alter the recorded step.
        metadata=TraceMetadata(tool_inputs=copy.deepcopy(inputs),
                               tool_info=copy.deepcopy(serialized)),
        usage_info=UsageInfo(token_usage=TokenUsageBaseModel()))

    self.step_manager.push_intermediate_step(start_payload)

    self._run_id_to_tool_input[run_key] = input_str
    self._run_id_to_start_time[run_key] = time.time()
async def on_tool_end(
    self,
    output: Any,
    *,
    run_id: UUID,
    parent_run_id: UUID | None = None,
    **kwargs: Any,
) -> Any:
    """Emit a TOOL_END intermediate step pairing the tool output with the
    input and start time captured by on_tool_start for the same run_id."""
    run_key = str(run_id)
    recorded_input = self._run_id_to_tool_input.get(run_key, "")
    started_at = self._run_id_to_start_time.get(run_key, time.time())

    end_payload = IntermediateStepPayload(
        event_type=IntermediateStepType.TOOL_END,
        span_event_timestamp=started_at,
        framework=LLMFrameworkEnum.LANGCHAIN,
        name=kwargs.get("name", ""),
        UUID=run_key,
        metadata=TraceMetadata(tool_outputs=output),
        usage_info=UsageInfo(token_usage=TokenUsageBaseModel()),
        data=StreamEventData(input=recorded_input, output=output))

    self.step_manager.push_intermediate_step(end_payload)
@@ -0,0 +1,205 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import copy
|
|
19
|
+
import logging
|
|
20
|
+
import threading
|
|
21
|
+
import time
|
|
22
|
+
from typing import Any
|
|
23
|
+
|
|
24
|
+
from llama_index.core.callbacks import CBEventType
|
|
25
|
+
from llama_index.core.callbacks import EventPayload
|
|
26
|
+
from llama_index.core.callbacks.base_handler import BaseCallbackHandler
|
|
27
|
+
from llama_index.core.llms import ChatResponse
|
|
28
|
+
|
|
29
|
+
from aiq.builder.context import AIQContext
|
|
30
|
+
from aiq.builder.framework_enum import LLMFrameworkEnum
|
|
31
|
+
from aiq.data_models.intermediate_step import IntermediateStepPayload
|
|
32
|
+
from aiq.data_models.intermediate_step import IntermediateStepType
|
|
33
|
+
from aiq.data_models.intermediate_step import StreamEventData
|
|
34
|
+
from aiq.data_models.intermediate_step import TraceMetadata
|
|
35
|
+
from aiq.data_models.intermediate_step import UsageInfo
|
|
36
|
+
from aiq.profiler.callbacks.base_callback_class import BaseProfilerCallback
|
|
37
|
+
from aiq.profiler.callbacks.token_usage_base_model import TokenUsageBaseModel
|
|
38
|
+
|
|
39
|
+
logger = logging.getLogger(__name__)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class LlamaIndexProfilerHandler(BaseCallbackHandler, BaseProfilerCallback):
    """
    A callback handler for LlamaIndex that tracks usage stats similarly to NIMCallbackHandler.
    Collects:

    - Prompts
    - Token usage
    - Response data
    - Time intervals between calls

    and appends them to AIQContextState.usage_stats.
    """

    def __init__(self) -> None:
        BaseCallbackHandler.__init__(self, event_starts_to_ignore=[], event_ends_to_ignore=[])
        BaseProfilerCallback.__init__(self)
        self._lock = threading.Lock()
        # Timestamp of the previous LLM call; used to compute seconds_between_calls.
        self.last_call_ts = time.time()
        # Maps event_id -> tool name so on_event_end can label the TOOL_END step.
        self._last_tool_map: dict[str, str] = {}
        self.step_manager = AIQContext.get().intermediate_step_manager

        # Per-event bookkeeping keyed by the LlamaIndex event_id.
        self._run_id_to_llm_input: dict[str, str] = {}
        self._run_id_to_tool_input: dict[str, Any] = {}
        self._run_id_to_timestamp: dict[str, float] = {}

    def on_event_start(
        self,
        event_type: CBEventType,
        payload: dict[str, Any] | None = None,
        event_id: str = "",
        parent_id: str = "",
        **kwargs: Any,
    ) -> str:
        """
        Called at the *start* of a LlamaIndex "event" (LLM call, Embedding, etc.).
        We capture the prompts or query strings here, if any.
        """
        prompts_or_messages = None
        now = time.time()
        seconds_between_calls = int(now - self.last_call_ts)

        # For LLM or chat calls, look in `payload` for messages/prompts
        if event_type == CBEventType.LLM and payload:
            if EventPayload.PROMPT in payload:
                prompts_or_messages = [payload[EventPayload.PROMPT]]
            elif EventPayload.MESSAGES in payload:
                prompts_or_messages = [str(msg) for msg in payload[EventPayload.MESSAGES]]

            # SERIALIZED may be absent or lack "model"; default to "" instead
            # of raising (and logging a full traceback) on every payload that
            # does not carry it.
            model_name = (payload.get(EventPayload.SERIALIZED) or {}).get("model", "")

            llm_text_input = " ".join(prompts_or_messages) if prompts_or_messages else ""

            if prompts_or_messages:
                stats = IntermediateStepPayload(
                    event_type=IntermediateStepType.LLM_START,
                    framework=LLMFrameworkEnum.LLAMA_INDEX,
                    name=model_name,
                    UUID=event_id,
                    data=StreamEventData(input=llm_text_input),
                    metadata=TraceMetadata(chat_inputs=copy.deepcopy(prompts_or_messages)),
                    usage_info=UsageInfo(token_usage=TokenUsageBaseModel(),
                                         num_llm_calls=1,
                                         seconds_between_calls=seconds_between_calls))

                self.step_manager.push_intermediate_step(stats)
                self._run_id_to_llm_input[event_id] = llm_text_input
                self.last_call_ts = now
                self._run_id_to_timestamp[event_id] = time.time()

        elif event_type == CBEventType.FUNCTION_CALL and payload:
            # TOOL may be missing from the payload; guard before touching
            # attributes (the previous code called .name on a possible None).
            tool = payload.get(EventPayload.TOOL)
            tool_name = getattr(tool, "name", "")
            tool_metadata = {
                "description": getattr(tool, "description", ""),
                "fn_schema_str": getattr(tool, "fn_schema_str", ""),
                "name": tool_name,
            }
            stats = IntermediateStepPayload(
                event_type=IntermediateStepType.TOOL_START,
                framework=LLMFrameworkEnum.LLAMA_INDEX,
                name=tool_name,
                UUID=event_id,
                data=StreamEventData(input=copy.deepcopy(payload.get(EventPayload.FUNCTION_CALL))),
                metadata=TraceMetadata(tool_inputs=copy.deepcopy(payload.get(EventPayload.FUNCTION_CALL)),
                                       tool_info=tool_metadata),
                usage_info=UsageInfo(token_usage=TokenUsageBaseModel()))

            self._run_id_to_tool_input[event_id] = copy.deepcopy(payload.get(EventPayload.FUNCTION_CALL))
            self._last_tool_map[event_id] = tool_name
            self.step_manager.push_intermediate_step(stats)
            self._run_id_to_timestamp[event_id] = time.time()
        return event_id  # must return the event_id

    def on_event_end(
        self,
        event_type: CBEventType,
        payload: dict[str, Any] | None = None,
        event_id: str = "",
        **kwargs: Any,
    ) -> None:
        """
        Called at the *end* of a LlamaIndex "event".
        We collect token usage (if available) and the returned response text.
        """
        if payload and event_type == CBEventType.LLM:
            # Often, token usage is embedded in e.g. payload["RESPONSE"].raw["usage"] for OpenAI-based calls
            response = payload.get(EventPayload.RESPONSE)
            if isinstance(response, ChatResponse):
                llm_text_output = ""
                try:
                    for block in response.message.blocks:
                        llm_text_output += block.text
                except Exception as e:
                    logger.exception("Error getting LLM text output: %s", e)

                model_name = ""
                try:
                    model_name = response.raw.model
                except Exception as e:
                    logger.exception("Error getting model name: %s", e)

                # Append usage data to AIQ Toolkit usage stats
                with self._lock:
                    stats = IntermediateStepPayload(
                        event_type=IntermediateStepType.LLM_END,
                        # Fall back to "now" when no LLM_START was recorded
                        # for this event (previously this could be None).
                        span_event_timestamp=self._run_id_to_timestamp.get(event_id, time.time()),
                        framework=LLMFrameworkEnum.LLAMA_INDEX,
                        name=model_name,
                        UUID=event_id,
                        data=StreamEventData(input=self._run_id_to_llm_input.get(event_id), output=llm_text_output),
                        metadata=TraceMetadata(chat_responses=response.message if response.message else None),
                        # NOTE(review): assumes additional_kwargs only holds
                        # fields accepted by TokenUsageBaseModel — confirm.
                        usage_info=UsageInfo(token_usage=TokenUsageBaseModel(**response.additional_kwargs)))
                    self.step_manager.push_intermediate_step(stats)

        elif event_type == CBEventType.FUNCTION_CALL and payload:
            stats = IntermediateStepPayload(
                event_type=IntermediateStepType.TOOL_END,
                span_event_timestamp=self._run_id_to_timestamp.get(event_id, time.time()),
                framework=LLMFrameworkEnum.LLAMA_INDEX,
                name=self._last_tool_map.get(event_id),
                UUID=event_id,
                data=StreamEventData(output=copy.deepcopy(payload.get(EventPayload.FUNCTION_OUTPUT))),
                usage_info=UsageInfo(token_usage=TokenUsageBaseModel()))

            self.step_manager.push_intermediate_step(stats)

    def start_trace(self, trace_id: str | None = None) -> None:
        """Run when an overall trace is launched."""
        pass

    def end_trace(
        self,
        trace_id: str | None = None,
        trace_map: dict[str, list[str]] | None = None,
    ) -> None:
        """Run when an overall trace is exited."""
        pass
|