nvidia-nat 1.2.0rc5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiq/agent/__init__.py +0 -0
- aiq/agent/base.py +239 -0
- aiq/agent/dual_node.py +67 -0
- aiq/agent/react_agent/__init__.py +0 -0
- aiq/agent/react_agent/agent.py +355 -0
- aiq/agent/react_agent/output_parser.py +104 -0
- aiq/agent/react_agent/prompt.py +41 -0
- aiq/agent/react_agent/register.py +149 -0
- aiq/agent/reasoning_agent/__init__.py +0 -0
- aiq/agent/reasoning_agent/reasoning_agent.py +225 -0
- aiq/agent/register.py +23 -0
- aiq/agent/rewoo_agent/__init__.py +0 -0
- aiq/agent/rewoo_agent/agent.py +411 -0
- aiq/agent/rewoo_agent/prompt.py +108 -0
- aiq/agent/rewoo_agent/register.py +158 -0
- aiq/agent/tool_calling_agent/__init__.py +0 -0
- aiq/agent/tool_calling_agent/agent.py +119 -0
- aiq/agent/tool_calling_agent/register.py +106 -0
- aiq/authentication/__init__.py +14 -0
- aiq/authentication/api_key/__init__.py +14 -0
- aiq/authentication/api_key/api_key_auth_provider.py +96 -0
- aiq/authentication/api_key/api_key_auth_provider_config.py +124 -0
- aiq/authentication/api_key/register.py +26 -0
- aiq/authentication/exceptions/__init__.py +14 -0
- aiq/authentication/exceptions/api_key_exceptions.py +38 -0
- aiq/authentication/http_basic_auth/__init__.py +0 -0
- aiq/authentication/http_basic_auth/http_basic_auth_provider.py +81 -0
- aiq/authentication/http_basic_auth/register.py +30 -0
- aiq/authentication/interfaces.py +93 -0
- aiq/authentication/oauth2/__init__.py +14 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider.py +107 -0
- aiq/authentication/oauth2/oauth2_auth_code_flow_provider_config.py +39 -0
- aiq/authentication/oauth2/register.py +25 -0
- aiq/authentication/register.py +21 -0
- aiq/builder/__init__.py +0 -0
- aiq/builder/builder.py +285 -0
- aiq/builder/component_utils.py +316 -0
- aiq/builder/context.py +264 -0
- aiq/builder/embedder.py +24 -0
- aiq/builder/eval_builder.py +161 -0
- aiq/builder/evaluator.py +29 -0
- aiq/builder/framework_enum.py +24 -0
- aiq/builder/front_end.py +73 -0
- aiq/builder/function.py +344 -0
- aiq/builder/function_base.py +380 -0
- aiq/builder/function_info.py +627 -0
- aiq/builder/intermediate_step_manager.py +174 -0
- aiq/builder/llm.py +25 -0
- aiq/builder/retriever.py +25 -0
- aiq/builder/user_interaction_manager.py +74 -0
- aiq/builder/workflow.py +148 -0
- aiq/builder/workflow_builder.py +1117 -0
- aiq/cli/__init__.py +14 -0
- aiq/cli/cli_utils/__init__.py +0 -0
- aiq/cli/cli_utils/config_override.py +231 -0
- aiq/cli/cli_utils/validation.py +37 -0
- aiq/cli/commands/__init__.py +0 -0
- aiq/cli/commands/configure/__init__.py +0 -0
- aiq/cli/commands/configure/channel/__init__.py +0 -0
- aiq/cli/commands/configure/channel/add.py +28 -0
- aiq/cli/commands/configure/channel/channel.py +36 -0
- aiq/cli/commands/configure/channel/remove.py +30 -0
- aiq/cli/commands/configure/channel/update.py +30 -0
- aiq/cli/commands/configure/configure.py +33 -0
- aiq/cli/commands/evaluate.py +139 -0
- aiq/cli/commands/info/__init__.py +14 -0
- aiq/cli/commands/info/info.py +39 -0
- aiq/cli/commands/info/list_channels.py +32 -0
- aiq/cli/commands/info/list_components.py +129 -0
- aiq/cli/commands/info/list_mcp.py +213 -0
- aiq/cli/commands/registry/__init__.py +14 -0
- aiq/cli/commands/registry/publish.py +88 -0
- aiq/cli/commands/registry/pull.py +118 -0
- aiq/cli/commands/registry/registry.py +38 -0
- aiq/cli/commands/registry/remove.py +108 -0
- aiq/cli/commands/registry/search.py +155 -0
- aiq/cli/commands/sizing/__init__.py +14 -0
- aiq/cli/commands/sizing/calc.py +297 -0
- aiq/cli/commands/sizing/sizing.py +27 -0
- aiq/cli/commands/start.py +246 -0
- aiq/cli/commands/uninstall.py +81 -0
- aiq/cli/commands/validate.py +47 -0
- aiq/cli/commands/workflow/__init__.py +14 -0
- aiq/cli/commands/workflow/templates/__init__.py.j2 +0 -0
- aiq/cli/commands/workflow/templates/config.yml.j2 +16 -0
- aiq/cli/commands/workflow/templates/pyproject.toml.j2 +22 -0
- aiq/cli/commands/workflow/templates/register.py.j2 +5 -0
- aiq/cli/commands/workflow/templates/workflow.py.j2 +36 -0
- aiq/cli/commands/workflow/workflow.py +37 -0
- aiq/cli/commands/workflow/workflow_commands.py +313 -0
- aiq/cli/entrypoint.py +135 -0
- aiq/cli/main.py +44 -0
- aiq/cli/register_workflow.py +488 -0
- aiq/cli/type_registry.py +1000 -0
- aiq/data_models/__init__.py +14 -0
- aiq/data_models/api_server.py +694 -0
- aiq/data_models/authentication.py +231 -0
- aiq/data_models/common.py +171 -0
- aiq/data_models/component.py +54 -0
- aiq/data_models/component_ref.py +168 -0
- aiq/data_models/config.py +406 -0
- aiq/data_models/dataset_handler.py +123 -0
- aiq/data_models/discovery_metadata.py +335 -0
- aiq/data_models/embedder.py +27 -0
- aiq/data_models/evaluate.py +127 -0
- aiq/data_models/evaluator.py +26 -0
- aiq/data_models/front_end.py +26 -0
- aiq/data_models/function.py +30 -0
- aiq/data_models/function_dependencies.py +72 -0
- aiq/data_models/interactive.py +246 -0
- aiq/data_models/intermediate_step.py +302 -0
- aiq/data_models/invocation_node.py +38 -0
- aiq/data_models/llm.py +27 -0
- aiq/data_models/logging.py +26 -0
- aiq/data_models/memory.py +27 -0
- aiq/data_models/object_store.py +44 -0
- aiq/data_models/profiler.py +54 -0
- aiq/data_models/registry_handler.py +26 -0
- aiq/data_models/retriever.py +30 -0
- aiq/data_models/retry_mixin.py +35 -0
- aiq/data_models/span.py +187 -0
- aiq/data_models/step_adaptor.py +64 -0
- aiq/data_models/streaming.py +33 -0
- aiq/data_models/swe_bench_model.py +54 -0
- aiq/data_models/telemetry_exporter.py +26 -0
- aiq/data_models/ttc_strategy.py +30 -0
- aiq/embedder/__init__.py +0 -0
- aiq/embedder/langchain_client.py +41 -0
- aiq/embedder/nim_embedder.py +59 -0
- aiq/embedder/openai_embedder.py +43 -0
- aiq/embedder/register.py +24 -0
- aiq/eval/__init__.py +14 -0
- aiq/eval/config.py +60 -0
- aiq/eval/dataset_handler/__init__.py +0 -0
- aiq/eval/dataset_handler/dataset_downloader.py +106 -0
- aiq/eval/dataset_handler/dataset_filter.py +52 -0
- aiq/eval/dataset_handler/dataset_handler.py +254 -0
- aiq/eval/evaluate.py +506 -0
- aiq/eval/evaluator/__init__.py +14 -0
- aiq/eval/evaluator/base_evaluator.py +73 -0
- aiq/eval/evaluator/evaluator_model.py +45 -0
- aiq/eval/intermediate_step_adapter.py +99 -0
- aiq/eval/rag_evaluator/__init__.py +0 -0
- aiq/eval/rag_evaluator/evaluate.py +178 -0
- aiq/eval/rag_evaluator/register.py +143 -0
- aiq/eval/register.py +23 -0
- aiq/eval/remote_workflow.py +133 -0
- aiq/eval/runners/__init__.py +14 -0
- aiq/eval/runners/config.py +39 -0
- aiq/eval/runners/multi_eval_runner.py +54 -0
- aiq/eval/runtime_event_subscriber.py +52 -0
- aiq/eval/swe_bench_evaluator/__init__.py +0 -0
- aiq/eval/swe_bench_evaluator/evaluate.py +215 -0
- aiq/eval/swe_bench_evaluator/register.py +36 -0
- aiq/eval/trajectory_evaluator/__init__.py +0 -0
- aiq/eval/trajectory_evaluator/evaluate.py +75 -0
- aiq/eval/trajectory_evaluator/register.py +40 -0
- aiq/eval/tunable_rag_evaluator/__init__.py +0 -0
- aiq/eval/tunable_rag_evaluator/evaluate.py +245 -0
- aiq/eval/tunable_rag_evaluator/register.py +52 -0
- aiq/eval/usage_stats.py +41 -0
- aiq/eval/utils/__init__.py +0 -0
- aiq/eval/utils/output_uploader.py +140 -0
- aiq/eval/utils/tqdm_position_registry.py +40 -0
- aiq/eval/utils/weave_eval.py +184 -0
- aiq/experimental/__init__.py +0 -0
- aiq/experimental/decorators/__init__.py +0 -0
- aiq/experimental/decorators/experimental_warning_decorator.py +130 -0
- aiq/experimental/test_time_compute/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/__init__.py +0 -0
- aiq/experimental/test_time_compute/editing/iterative_plan_refinement_editor.py +147 -0
- aiq/experimental/test_time_compute/editing/llm_as_a_judge_editor.py +204 -0
- aiq/experimental/test_time_compute/editing/motivation_aware_summarization.py +107 -0
- aiq/experimental/test_time_compute/functions/__init__.py +0 -0
- aiq/experimental/test_time_compute/functions/execute_score_select_function.py +105 -0
- aiq/experimental/test_time_compute/functions/its_tool_orchestration_function.py +205 -0
- aiq/experimental/test_time_compute/functions/its_tool_wrapper_function.py +146 -0
- aiq/experimental/test_time_compute/functions/plan_select_execute_function.py +224 -0
- aiq/experimental/test_time_compute/models/__init__.py +0 -0
- aiq/experimental/test_time_compute/models/editor_config.py +132 -0
- aiq/experimental/test_time_compute/models/scoring_config.py +112 -0
- aiq/experimental/test_time_compute/models/search_config.py +120 -0
- aiq/experimental/test_time_compute/models/selection_config.py +154 -0
- aiq/experimental/test_time_compute/models/stage_enums.py +43 -0
- aiq/experimental/test_time_compute/models/strategy_base.py +66 -0
- aiq/experimental/test_time_compute/models/tool_use_config.py +41 -0
- aiq/experimental/test_time_compute/models/ttc_item.py +48 -0
- aiq/experimental/test_time_compute/register.py +36 -0
- aiq/experimental/test_time_compute/scoring/__init__.py +0 -0
- aiq/experimental/test_time_compute/scoring/llm_based_agent_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/llm_based_plan_scorer.py +168 -0
- aiq/experimental/test_time_compute/scoring/motivation_aware_scorer.py +111 -0
- aiq/experimental/test_time_compute/search/__init__.py +0 -0
- aiq/experimental/test_time_compute/search/multi_llm_planner.py +128 -0
- aiq/experimental/test_time_compute/search/multi_query_retrieval_search.py +122 -0
- aiq/experimental/test_time_compute/search/single_shot_multi_plan_planner.py +128 -0
- aiq/experimental/test_time_compute/selection/__init__.py +0 -0
- aiq/experimental/test_time_compute/selection/best_of_n_selector.py +63 -0
- aiq/experimental/test_time_compute/selection/llm_based_agent_output_selector.py +131 -0
- aiq/experimental/test_time_compute/selection/llm_based_output_merging_selector.py +159 -0
- aiq/experimental/test_time_compute/selection/llm_based_plan_selector.py +128 -0
- aiq/experimental/test_time_compute/selection/threshold_selector.py +58 -0
- aiq/front_ends/__init__.py +14 -0
- aiq/front_ends/console/__init__.py +14 -0
- aiq/front_ends/console/authentication_flow_handler.py +233 -0
- aiq/front_ends/console/console_front_end_config.py +32 -0
- aiq/front_ends/console/console_front_end_plugin.py +96 -0
- aiq/front_ends/console/register.py +25 -0
- aiq/front_ends/cron/__init__.py +14 -0
- aiq/front_ends/fastapi/__init__.py +14 -0
- aiq/front_ends/fastapi/auth_flow_handlers/__init__.py +0 -0
- aiq/front_ends/fastapi/auth_flow_handlers/http_flow_handler.py +27 -0
- aiq/front_ends/fastapi/auth_flow_handlers/websocket_flow_handler.py +107 -0
- aiq/front_ends/fastapi/fastapi_front_end_config.py +234 -0
- aiq/front_ends/fastapi/fastapi_front_end_controller.py +68 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin.py +116 -0
- aiq/front_ends/fastapi/fastapi_front_end_plugin_worker.py +1092 -0
- aiq/front_ends/fastapi/html_snippets/__init__.py +14 -0
- aiq/front_ends/fastapi/html_snippets/auth_code_grant_success.py +35 -0
- aiq/front_ends/fastapi/intermediate_steps_subscriber.py +80 -0
- aiq/front_ends/fastapi/job_store.py +183 -0
- aiq/front_ends/fastapi/main.py +72 -0
- aiq/front_ends/fastapi/message_handler.py +298 -0
- aiq/front_ends/fastapi/message_validator.py +345 -0
- aiq/front_ends/fastapi/register.py +25 -0
- aiq/front_ends/fastapi/response_helpers.py +195 -0
- aiq/front_ends/fastapi/step_adaptor.py +321 -0
- aiq/front_ends/mcp/__init__.py +14 -0
- aiq/front_ends/mcp/mcp_front_end_config.py +32 -0
- aiq/front_ends/mcp/mcp_front_end_plugin.py +93 -0
- aiq/front_ends/mcp/register.py +27 -0
- aiq/front_ends/mcp/tool_converter.py +242 -0
- aiq/front_ends/register.py +22 -0
- aiq/front_ends/simple_base/__init__.py +14 -0
- aiq/front_ends/simple_base/simple_front_end_plugin_base.py +54 -0
- aiq/llm/__init__.py +0 -0
- aiq/llm/aws_bedrock_llm.py +57 -0
- aiq/llm/nim_llm.py +46 -0
- aiq/llm/openai_llm.py +46 -0
- aiq/llm/register.py +23 -0
- aiq/llm/utils/__init__.py +14 -0
- aiq/llm/utils/env_config_value.py +94 -0
- aiq/llm/utils/error.py +17 -0
- aiq/memory/__init__.py +20 -0
- aiq/memory/interfaces.py +183 -0
- aiq/memory/models.py +112 -0
- aiq/meta/module_to_distro.json +3 -0
- aiq/meta/pypi.md +58 -0
- aiq/object_store/__init__.py +20 -0
- aiq/object_store/in_memory_object_store.py +76 -0
- aiq/object_store/interfaces.py +84 -0
- aiq/object_store/models.py +36 -0
- aiq/object_store/register.py +20 -0
- aiq/observability/__init__.py +14 -0
- aiq/observability/exporter/__init__.py +14 -0
- aiq/observability/exporter/base_exporter.py +449 -0
- aiq/observability/exporter/exporter.py +78 -0
- aiq/observability/exporter/file_exporter.py +33 -0
- aiq/observability/exporter/processing_exporter.py +322 -0
- aiq/observability/exporter/raw_exporter.py +52 -0
- aiq/observability/exporter/span_exporter.py +265 -0
- aiq/observability/exporter_manager.py +335 -0
- aiq/observability/mixin/__init__.py +14 -0
- aiq/observability/mixin/batch_config_mixin.py +26 -0
- aiq/observability/mixin/collector_config_mixin.py +23 -0
- aiq/observability/mixin/file_mixin.py +288 -0
- aiq/observability/mixin/file_mode.py +23 -0
- aiq/observability/mixin/resource_conflict_mixin.py +134 -0
- aiq/observability/mixin/serialize_mixin.py +61 -0
- aiq/observability/mixin/type_introspection_mixin.py +183 -0
- aiq/observability/processor/__init__.py +14 -0
- aiq/observability/processor/batching_processor.py +310 -0
- aiq/observability/processor/callback_processor.py +42 -0
- aiq/observability/processor/intermediate_step_serializer.py +28 -0
- aiq/observability/processor/processor.py +71 -0
- aiq/observability/register.py +96 -0
- aiq/observability/utils/__init__.py +14 -0
- aiq/observability/utils/dict_utils.py +236 -0
- aiq/observability/utils/time_utils.py +31 -0
- aiq/plugins/.namespace +1 -0
- aiq/profiler/__init__.py +0 -0
- aiq/profiler/calc/__init__.py +14 -0
- aiq/profiler/calc/calc_runner.py +627 -0
- aiq/profiler/calc/calculations.py +288 -0
- aiq/profiler/calc/data_models.py +188 -0
- aiq/profiler/calc/plot.py +345 -0
- aiq/profiler/callbacks/__init__.py +0 -0
- aiq/profiler/callbacks/agno_callback_handler.py +295 -0
- aiq/profiler/callbacks/base_callback_class.py +20 -0
- aiq/profiler/callbacks/langchain_callback_handler.py +290 -0
- aiq/profiler/callbacks/llama_index_callback_handler.py +205 -0
- aiq/profiler/callbacks/semantic_kernel_callback_handler.py +238 -0
- aiq/profiler/callbacks/token_usage_base_model.py +27 -0
- aiq/profiler/data_frame_row.py +51 -0
- aiq/profiler/data_models.py +24 -0
- aiq/profiler/decorators/__init__.py +0 -0
- aiq/profiler/decorators/framework_wrapper.py +131 -0
- aiq/profiler/decorators/function_tracking.py +254 -0
- aiq/profiler/forecasting/__init__.py +0 -0
- aiq/profiler/forecasting/config.py +18 -0
- aiq/profiler/forecasting/model_trainer.py +75 -0
- aiq/profiler/forecasting/models/__init__.py +22 -0
- aiq/profiler/forecasting/models/forecasting_base_model.py +40 -0
- aiq/profiler/forecasting/models/linear_model.py +196 -0
- aiq/profiler/forecasting/models/random_forest_regressor.py +268 -0
- aiq/profiler/inference_metrics_model.py +28 -0
- aiq/profiler/inference_optimization/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/__init__.py +0 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/nested_stack_analysis.py +460 -0
- aiq/profiler/inference_optimization/bottleneck_analysis/simple_stack_analysis.py +258 -0
- aiq/profiler/inference_optimization/data_models.py +386 -0
- aiq/profiler/inference_optimization/experimental/__init__.py +0 -0
- aiq/profiler/inference_optimization/experimental/concurrency_spike_analysis.py +468 -0
- aiq/profiler/inference_optimization/experimental/prefix_span_analysis.py +405 -0
- aiq/profiler/inference_optimization/llm_metrics.py +212 -0
- aiq/profiler/inference_optimization/prompt_caching.py +163 -0
- aiq/profiler/inference_optimization/token_uniqueness.py +107 -0
- aiq/profiler/inference_optimization/workflow_runtimes.py +72 -0
- aiq/profiler/intermediate_property_adapter.py +102 -0
- aiq/profiler/profile_runner.py +473 -0
- aiq/profiler/utils.py +184 -0
- aiq/registry_handlers/__init__.py +0 -0
- aiq/registry_handlers/local/__init__.py +0 -0
- aiq/registry_handlers/local/local_handler.py +176 -0
- aiq/registry_handlers/local/register_local.py +37 -0
- aiq/registry_handlers/metadata_factory.py +60 -0
- aiq/registry_handlers/package_utils.py +567 -0
- aiq/registry_handlers/pypi/__init__.py +0 -0
- aiq/registry_handlers/pypi/pypi_handler.py +251 -0
- aiq/registry_handlers/pypi/register_pypi.py +40 -0
- aiq/registry_handlers/register.py +21 -0
- aiq/registry_handlers/registry_handler_base.py +157 -0
- aiq/registry_handlers/rest/__init__.py +0 -0
- aiq/registry_handlers/rest/register_rest.py +56 -0
- aiq/registry_handlers/rest/rest_handler.py +237 -0
- aiq/registry_handlers/schemas/__init__.py +0 -0
- aiq/registry_handlers/schemas/headers.py +42 -0
- aiq/registry_handlers/schemas/package.py +68 -0
- aiq/registry_handlers/schemas/publish.py +63 -0
- aiq/registry_handlers/schemas/pull.py +82 -0
- aiq/registry_handlers/schemas/remove.py +36 -0
- aiq/registry_handlers/schemas/search.py +91 -0
- aiq/registry_handlers/schemas/status.py +47 -0
- aiq/retriever/__init__.py +0 -0
- aiq/retriever/interface.py +37 -0
- aiq/retriever/milvus/__init__.py +14 -0
- aiq/retriever/milvus/register.py +81 -0
- aiq/retriever/milvus/retriever.py +228 -0
- aiq/retriever/models.py +74 -0
- aiq/retriever/nemo_retriever/__init__.py +14 -0
- aiq/retriever/nemo_retriever/register.py +60 -0
- aiq/retriever/nemo_retriever/retriever.py +190 -0
- aiq/retriever/register.py +22 -0
- aiq/runtime/__init__.py +14 -0
- aiq/runtime/loader.py +215 -0
- aiq/runtime/runner.py +190 -0
- aiq/runtime/session.py +158 -0
- aiq/runtime/user_metadata.py +130 -0
- aiq/settings/__init__.py +0 -0
- aiq/settings/global_settings.py +318 -0
- aiq/test/.namespace +1 -0
- aiq/tool/__init__.py +0 -0
- aiq/tool/chat_completion.py +74 -0
- aiq/tool/code_execution/README.md +151 -0
- aiq/tool/code_execution/__init__.py +0 -0
- aiq/tool/code_execution/code_sandbox.py +267 -0
- aiq/tool/code_execution/local_sandbox/.gitignore +1 -0
- aiq/tool/code_execution/local_sandbox/Dockerfile.sandbox +60 -0
- aiq/tool/code_execution/local_sandbox/__init__.py +13 -0
- aiq/tool/code_execution/local_sandbox/local_sandbox_server.py +198 -0
- aiq/tool/code_execution/local_sandbox/sandbox.requirements.txt +6 -0
- aiq/tool/code_execution/local_sandbox/start_local_sandbox.sh +50 -0
- aiq/tool/code_execution/register.py +74 -0
- aiq/tool/code_execution/test_code_execution_sandbox.py +414 -0
- aiq/tool/code_execution/utils.py +100 -0
- aiq/tool/datetime_tools.py +42 -0
- aiq/tool/document_search.py +141 -0
- aiq/tool/github_tools/__init__.py +0 -0
- aiq/tool/github_tools/create_github_commit.py +133 -0
- aiq/tool/github_tools/create_github_issue.py +87 -0
- aiq/tool/github_tools/create_github_pr.py +106 -0
- aiq/tool/github_tools/get_github_file.py +106 -0
- aiq/tool/github_tools/get_github_issue.py +166 -0
- aiq/tool/github_tools/get_github_pr.py +256 -0
- aiq/tool/github_tools/update_github_issue.py +100 -0
- aiq/tool/mcp/__init__.py +14 -0
- aiq/tool/mcp/exceptions.py +142 -0
- aiq/tool/mcp/mcp_client.py +255 -0
- aiq/tool/mcp/mcp_tool.py +96 -0
- aiq/tool/memory_tools/__init__.py +0 -0
- aiq/tool/memory_tools/add_memory_tool.py +79 -0
- aiq/tool/memory_tools/delete_memory_tool.py +67 -0
- aiq/tool/memory_tools/get_memory_tool.py +72 -0
- aiq/tool/nvidia_rag.py +95 -0
- aiq/tool/register.py +38 -0
- aiq/tool/retriever.py +89 -0
- aiq/tool/server_tools.py +66 -0
- aiq/utils/__init__.py +0 -0
- aiq/utils/data_models/__init__.py +0 -0
- aiq/utils/data_models/schema_validator.py +58 -0
- aiq/utils/debugging_utils.py +43 -0
- aiq/utils/dump_distro_mapping.py +32 -0
- aiq/utils/exception_handlers/__init__.py +0 -0
- aiq/utils/exception_handlers/automatic_retries.py +289 -0
- aiq/utils/exception_handlers/mcp.py +211 -0
- aiq/utils/exception_handlers/schemas.py +114 -0
- aiq/utils/io/__init__.py +0 -0
- aiq/utils/io/model_processing.py +28 -0
- aiq/utils/io/yaml_tools.py +119 -0
- aiq/utils/log_utils.py +37 -0
- aiq/utils/metadata_utils.py +74 -0
- aiq/utils/optional_imports.py +142 -0
- aiq/utils/producer_consumer_queue.py +178 -0
- aiq/utils/reactive/__init__.py +0 -0
- aiq/utils/reactive/base/__init__.py +0 -0
- aiq/utils/reactive/base/observable_base.py +65 -0
- aiq/utils/reactive/base/observer_base.py +55 -0
- aiq/utils/reactive/base/subject_base.py +79 -0
- aiq/utils/reactive/observable.py +59 -0
- aiq/utils/reactive/observer.py +76 -0
- aiq/utils/reactive/subject.py +131 -0
- aiq/utils/reactive/subscription.py +49 -0
- aiq/utils/settings/__init__.py +0 -0
- aiq/utils/settings/global_settings.py +197 -0
- aiq/utils/string_utils.py +38 -0
- aiq/utils/type_converter.py +290 -0
- aiq/utils/type_utils.py +484 -0
- aiq/utils/url_utils.py +27 -0
- nvidia_nat-1.2.0rc5.dist-info/METADATA +363 -0
- nvidia_nat-1.2.0rc5.dist-info/RECORD +435 -0
- nvidia_nat-1.2.0rc5.dist-info/WHEEL +5 -0
- nvidia_nat-1.2.0rc5.dist-info/entry_points.txt +20 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE-3rd-party.txt +3686 -0
- nvidia_nat-1.2.0rc5.dist-info/licenses/LICENSE.md +201 -0
- nvidia_nat-1.2.0rc5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from aiq.data_models.intermediate_step import IntermediateStep
|
|
17
|
+
from aiq.profiler.inference_optimization.data_models import CommonPrefixesOutput
|
|
18
|
+
from aiq.profiler.inference_optimization.data_models import FrameworkLLMPrefixData
|
|
19
|
+
from aiq.profiler.inference_optimization.data_models import PrefixInfo
|
|
20
|
+
from aiq.profiler.utils import create_standardized_dataframe
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
# -----------------------------------------------------------
|
|
24
|
+
# 1. Helper: Build a prefix trie
|
|
25
|
+
# -----------------------------------------------------------
|
|
26
|
+
def build_prefix_trie(strings: list[str]) -> dict:
    """
    Construct a character-level trie over a list of strings.

    Each trie node is a plain nested dictionary of the form::

        {
            'count': int,              # number of strings passing through this node
            'children': dict[str, TrieNode]
        }

    :param strings: Strings to insert into the trie.
    :return: The root trie node; its 'count' equals len(strings).
    """
    root = {'count': 0, 'children': {}}
    for text in strings:
        # Every string passes through the root.
        root['count'] += 1
        current = root
        for char in text:
            # setdefault creates the child node on first visit, reuses it afterwards.
            current = current['children'].setdefault(char, {'count': 0, 'children': {}})
            current['count'] += 1
    return root
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# -----------------------------------------------------------
|
|
51
|
+
# 2. Helper: Iterative traversal of the trie
|
|
52
|
+
# -----------------------------------------------------------
|
|
53
|
+
def collect_prefixes_iterative(root: dict, total_calls: int) -> list[dict]:
    """
    Walk the trie with an explicit stack and gather per-prefix statistics.

    An explicit stack (depth-first, last-in-first-out) is used instead of
    recursion so that very long strings cannot hit Python's recursion limit.

    :param root: Trie node with 'count' and 'children'
    :param total_calls: Number of total calls in this group (denominator for percentages)
    :return: A list of dicts, each with keys 'prefix', 'prefix_length',
             'calls_count' and 'calls_percentage'
    """
    collected: list[dict] = []
    # Each stack entry pairs a node with the prefix spelled out so far.
    pending = [(root, "")]

    while pending:
        current, path = pending.pop()

        # The root corresponds to the empty prefix and is not reported.
        if path:
            count = current['count']
            collected.append({
                'prefix': path,
                'prefix_length': len(path),
                'calls_count': count,
                'calls_percentage': count / total_calls,
            })

        # Push children for later processing, extending the prefix by one char.
        pending.extend((child, path + char) for char, child in current['children'].items())

    return collected
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
# -----------------------------------------------------------
|
|
88
|
+
# 3. Main Function
|
|
89
|
+
# -----------------------------------------------------------
|
|
90
|
+
def get_common_prefixes(all_steps: list[list[IntermediateStep]],
                        min_call_percentage: float = 0.0) -> CommonPrefixesOutput:
    """
    Compute common LLM-input prefixes per LLM from intermediate steps.

    The steps are flattened into a standardized DataFrame (which must contain
    the 'framework', 'llm_name' and 'llm_text_input' columns) and grouped by
    LLM name. For each LLM, a character-level prefix trie of all text inputs
    is built and traversed to collect prefix statistics.

    1) Only includes prefixes with calls_percentage >= ``min_call_percentage``.
    2) Excludes any prefix that is a substring of another (longer) prefix
       that already meets the threshold and is retained.

    Sorting: primarily by prefix length (descending),
    secondarily by frequency (descending).

    :param all_steps: Intermediate steps, one inner list per workflow example.
    :param min_call_percentage: Exclude prefixes that appear in fewer than this
        fraction of total calls. (Default 0.0 = no filtering)
    :return: A Pydantic-validated RootModel keyed by LLM name.
    :raises ValueError: If the standardized DataFrame lacks a required column.
    """
    df = create_standardized_dataframe(all_steps)

    # Validate necessary columns before grouping.
    required_cols = {'framework', 'llm_name', 'llm_text_input'}
    if not required_cols.issubset(df.columns):
        missing = required_cols - set(df.columns)
        raise ValueError(f"DataFrame missing required columns: {missing}")

    output_data: dict[str, FrameworkLLMPrefixData] = {}

    # Group DataFrame by LLM name. NOTE: grouping by a list of keys makes
    # pandas deliver each group key as a 1-tuple, hence the unpack below.
    grouped = df.groupby(['llm_name'])
    for llm_name, group_df in grouped:
        llm_name = llm_name[0]  # unpack the 1-tuple group key

        text_inputs = group_df['llm_text_input'].astype(str).tolist()
        total_calls = len(text_inputs)

        # Build trie for all text inputs of this LLM.
        trie = build_prefix_trie(text_inputs)

        # Collect prefix info using iterative traversal (avoids recursion limits).
        results = collect_prefixes_iterative(trie, total_calls=total_calls)

        # 1) Filter out prefixes below min_call_percentage.
        results_filtered = [r for r in results if r['calls_percentage'] >= min_call_percentage]

        # 2) Sort results: prefix_length desc, then calls_count desc.
        results_sorted = sorted(results_filtered, key=lambda x: (x['prefix_length'], x['calls_count']), reverse=True)

        # 3) Substring filtering: because results_sorted is in descending length
        #    order, keeping a prefix excludes any shorter prefix that is a
        #    substring of an already-kept prefix.
        final_results = []
        for r in results_sorted:
            if not any(r['prefix'] in kept['prefix'] for kept in final_results):
                final_results.append(r)

        # Convert each dict to a PrefixInfo model and store under the LLM name.
        prefix_info_list = [PrefixInfo(**res) for res in final_results]
        output_data[llm_name] = FrameworkLLMPrefixData(total_calls=total_calls, prefix_info=prefix_info_list)

    # Package the final result in a validated RootModel.
    return CommonPrefixesOutput(root=output_data)
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
import re
|
|
17
|
+
|
|
18
|
+
import numpy as np
|
|
19
|
+
|
|
20
|
+
from aiq.data_models.intermediate_step import IntermediateStep
|
|
21
|
+
from aiq.profiler.inference_optimization.data_models import LLMUniquenessMetrics
|
|
22
|
+
from aiq.profiler.inference_optimization.data_models import LLMUniquenessMetricsByLLM
|
|
23
|
+
from aiq.profiler.utils import create_standardized_dataframe
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# ----------------------------------------------------------------
|
|
27
|
+
# 1. Main Function
|
|
28
|
+
# ----------------------------------------------------------------
|
|
29
|
+
def compute_inter_query_token_uniqueness_by_llm(all_steps: list[list[IntermediateStep]]) -> LLMUniquenessMetricsByLLM:
    """
    Computes p90, p95, and p99 of 'new words added' between consecutive llm_start events,
    grouped by (llm_name, example_number).

    Steps:

    1. Standardize ``all_steps`` into a DataFrame and filter to LLM_START events.
    2. Group by (llm_name, example_number), then sort by event_timestamp in each group.
    3. Compare each llm_text_input to the previous one in the same group to find how many new words appear.
    4. Aggregate all 'new words count' across each llm_name, compute p90/p95/p99 for each LLM.
    5. Return a Pydantic RootModel containing a dictionary::

        { llm_name -> LLMUniquenessMetrics(p90, p95, p99) }.

    Parameters
    ----------
    all_steps : list[list[IntermediateStep]]
        Intermediate steps for every example, one inner list per example.

    Returns
    -------
    LLMUniquenessMetricsByLLM
        RootModel wrapping ``{llm_name: LLMUniquenessMetrics}``; empty when
        there are no LLM_START events.

    Raises
    ------
    ValueError
        If the standardized DataFrame is missing any required column.
    """
    df = create_standardized_dataframe(all_steps)
    # Validate that the necessary columns exist
    required_cols = {'event_type', 'llm_name', 'example_number', 'event_timestamp', 'llm_text_input'}
    missing = required_cols - set(df.columns)
    if missing:
        raise ValueError(f"DataFrame missing required columns: {missing}")

    # 1) Filter to llm_start events
    cdf = df[df['event_type'] == 'LLM_START'].copy()
    if cdf.empty:
        # Return an empty dictionary if no llm_start events
        return LLMUniquenessMetricsByLLM(root={})

    # Helper to tokenize text into a set of lowercase word tokens.
    # Non-string values (e.g. NaN from a shift) tokenize to the empty set.
    def tokenize_to_set(text: str) -> set:
        if not isinstance(text, str):
            return set()
        return set(re.findall(r"\w+", text.lower()))

    # Accumulated new-word counts keyed by llm_name
    llm_to_counts: dict[str, list[int]] = {}

    # 2) Group by (llm_name, example_number); chronological order is imposed below
    for (llm, _example_number), group_df in cdf.groupby(['llm_name', 'example_number']):
        # Sort by event_timestamp so "previous" means the prior call in time
        group_df = group_df.sort_values('event_timestamp', ascending=True)

        # Shift the llm_text_input to compare consecutive calls. Rows whose
        # predecessor is NaN (the first call in the group) are excluded, matching
        # the dropna-on-previous semantics. Comparing token sets directly here
        # avoids a per-row DataFrame.apply over rows that would be dropped anyway,
        # and keeps the tokenizer defined once instead of once per group.
        prev_inputs = group_df['llm_text_input'].shift(1)
        valid = prev_inputs.notna()

        # 3) Count words present in the current input but not the previous one
        counts = [
            len(tokenize_to_set(cur) - tokenize_to_set(prev))
            for cur, prev in zip(group_df.loc[valid, 'llm_text_input'], prev_inputs[valid])
        ]
        if counts:
            llm_to_counts.setdefault(llm, []).extend(counts)

    # 4) For each llm_name, compute p90, p95, p99
    output_dict = {}
    for llm_name, counts_list in llm_to_counts.items():
        arr = np.array(counts_list)
        output_dict[llm_name] = LLMUniquenessMetrics(
            p90=float(np.percentile(arr, 90)),
            p95=float(np.percentile(arr, 95)),
            p99=float(np.percentile(arr, 99)),
        )

    # 5) Validate & return as a RootModel
    return LLMUniquenessMetricsByLLM(root=output_dict)
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
|
|
18
|
+
from aiq.data_models.intermediate_step import IntermediateStep
|
|
19
|
+
from aiq.profiler.inference_optimization.data_models import WorkflowRuntimeMetrics
|
|
20
|
+
from aiq.profiler.utils import create_standardized_dataframe
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def compute_workflow_runtime_metrics(all_steps: list[list[IntermediateStep]]) -> WorkflowRuntimeMetrics:
    """
    Computes the p90, p95, and p99 of workflow runtime for each example_number.

    The 'workflow runtime' per example is::

        max(event_timestamp) - min(event_timestamp)

    for that example_number.

    Parameters
    ----------
    all_steps : list[list[IntermediateStep]]
        Intermediate steps for every example, one inner list per example. The
        DataFrame standardized from them must contain at least two columns:
        - 'example_number'
        - 'event_timestamp'

    Returns
    -------
    WorkflowRuntimeMetrics
        A Pydantic model with 'p90', 'p95', and 'p99' attributes.

    Raises
    ------
    ValueError
        If the standardized DataFrame is missing a required column.
    """
    df = create_standardized_dataframe(all_steps)
    required_cols = {"example_number", "event_timestamp"}
    missing = required_cols - set(df.columns)
    if missing:
        raise ValueError(f"DataFrame is missing required columns: {missing}")

    # Group by example_number, then find min and max timestamp
    grouped = df.groupby("example_number")["event_timestamp"]

    # Workflow runtime is the difference between max and min per example
    runtimes = grouped.max() - grouped.min()

    # Convert to a NumPy array for percentile calculations
    runtimes_arr = runtimes.to_numpy()

    # Edge case: if there's no data at all, report zeroed metrics
    # (NumPy percentile can handle 1-element arrays, but not empty ones)
    if len(runtimes_arr) == 0:
        return WorkflowRuntimeMetrics(p90=0.0, p95=0.0, p99=0.0)

    # Compute p90, p95, p99
    p90_val = float(np.percentile(runtimes_arr, 90))
    p95_val = float(np.percentile(runtimes_arr, 95))
    p99_val = float(np.percentile(runtimes_arr, 99))

    return WorkflowRuntimeMetrics(p90=p90_val, p95=p95_val, p99=p99_val)
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
from aiq.data_models.intermediate_step import IntermediateStep
|
|
17
|
+
from aiq.data_models.intermediate_step import IntermediateStepType
|
|
18
|
+
from aiq.data_models.intermediate_step import TokenUsageBaseModel
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class IntermediatePropertyAdaptor(IntermediateStep):
    """Adaptor over IntermediateStep that exposes payload fields as flat properties.

    Each property returns the relevant payload field only when the step's
    event_type matches; otherwise it falls back to an empty/default value.
    """

    @classmethod
    def from_intermediate_step(cls, step: IntermediateStep) -> "IntermediatePropertyAdaptor":
        """
        Create an adaptor instance from an existing IntermediateStep.
        Uses the dict() representation of the instance to initialize the adaptor.
        """
        return cls(**step.model_dump())

    @property
    def token_usage(self) -> TokenUsageBaseModel:
        """Token usage from usage_info, or an empty model when usage_info is absent."""
        usage = self.payload.usage_info
        return usage.token_usage if usage else TokenUsageBaseModel()

    @property
    def seconds_between_calls(self) -> int:
        """Seconds between calls from usage_info, or 0 when usage_info is absent."""
        usage = self.payload.usage_info
        return usage.seconds_between_calls if usage else 0

    @property
    def llm_text_input(self) -> str:
        """Payload input for LLM_START events; empty string otherwise."""
        data = self.payload.data
        return data.input if (data and self.event_type == IntermediateStepType.LLM_START) else ""

    @property
    def llm_text_output(self) -> str:
        """Payload output for LLM_END events; empty string otherwise."""
        data = self.payload.data
        return data.output if (data and self.event_type == IntermediateStepType.LLM_END) else ""

    @property
    def llm_text_chunk(self) -> str:
        """Payload chunk for LLM_NEW_TOKEN events; empty string otherwise."""
        data = self.payload.data
        return data.chunk if (data and self.event_type == IntermediateStepType.LLM_NEW_TOKEN) else ""

    @property
    def tool_input(self) -> str:
        """Payload input for TOOL_START events; empty string otherwise."""
        data = self.payload.data
        return data.input if (data and self.event_type == IntermediateStepType.TOOL_START) else ""

    @property
    def tool_output(self) -> str:
        """Payload output for TOOL_END events; empty string otherwise."""
        data = self.payload.data
        return data.output if (data and self.event_type == IntermediateStepType.TOOL_END) else ""

    @property
    def llm_name(self) -> str:
        """Payload name for LLM_START/LLM_END events; empty string otherwise."""
        name = self.payload.name
        if name and self.event_type in (IntermediateStepType.LLM_START, IntermediateStepType.LLM_END):
            return name
        return ""

    @property
    def tool_name(self) -> str:
        """Payload name for TOOL_START/TOOL_END events; empty string otherwise."""
        name = self.payload.name
        if name and self.event_type in (IntermediateStepType.TOOL_START, IntermediateStepType.TOOL_END):
            return name
        return ""

    @property
    def function_name(self) -> str:
        """Name of the function recorded in this step's ancestry."""
        return self.function_ancestry.function_name

    @property
    def function_id(self) -> str:
        """Identifier of the function recorded in this step's ancestry."""
        return self.function_ancestry.function_id

    @property
    def parent_function_id(self) -> str:
        """Identifier of the parent function in this step's ancestry."""
        return self.function_ancestry.parent_id

    @property
    def parent_function_name(self) -> str:
        """Name of the parent function in this step's ancestry."""
        return self.function_ancestry.parent_name
|